lfx-nightly 0.1.12.dev34__py3-none-any.whl → 0.1.12.dev35__py3-none-any.whl
- lfx/base/tools/flow_tool.py +1 -1
- lfx/base/tools/run_flow.py +1 -1
- lfx/cli/commands.py +17 -12
- lfx/cli/run.py +156 -95
- lfx/components/deactivated/sub_flow.py +1 -1
- lfx/components/logic/flow_tool.py +1 -1
- lfx/components/logic/run_flow.py +1 -1
- lfx/components/logic/sub_flow.py +1 -1
- lfx/custom/custom_component/custom_component.py +1 -1
- lfx/graph/vertex/param_handler.py +2 -2
- lfx/helpers/__init__.py +129 -1
- lfx/helpers/flow.py +0 -3
- lfx/inputs/input_mixin.py +2 -1
- lfx/inputs/inputs.py +5 -14
- lfx/log/logger.py +5 -1
- lfx/memory/__init__.py +10 -30
- lfx/schema/cross_module.py +80 -0
- lfx/schema/data.py +2 -1
- lfx/utils/langflow_utils.py +52 -0
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev35.dist-info}/METADATA +1 -1
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev35.dist-info}/RECORD +23 -21
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev35.dist-info}/WHEEL +0 -0
- {lfx_nightly-0.1.12.dev34.dist-info → lfx_nightly-0.1.12.dev35.dist-info}/entry_points.txt +0 -0
lfx/base/tools/flow_tool.py
CHANGED
@@ -6,7 +6,7 @@ from langchain_core.tools import BaseTool, ToolException
 from typing_extensions import override

 from lfx.base.flow_processing.utils import build_data_from_result_data, format_flow_output_data
-from lfx.helpers
+from lfx.helpers import build_schema_from_inputs, get_arg_names, get_flow_inputs, run_flow
 from lfx.log.logger import logger
 from lfx.utils.async_helpers import run_until_complete

lfx/base/tools/run_flow.py
CHANGED
@@ -5,7 +5,7 @@ from lfx.custom.custom_component.component import Component, get_component_toolk
 from lfx.field_typing import Tool
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers
+from lfx.helpers import get_flow_inputs
 from lfx.inputs.inputs import DropdownInput, InputTypes, MessageInput
 from lfx.log.logger import logger
 from lfx.schema.data import Data
lfx/cli/commands.py
CHANGED
@@ -85,6 +85,11 @@ def serve_command(
         cat my_flow.json | lfx serve --stdin
         echo '{"nodes": [...]}' | lfx serve --stdin
     """
+    # Configure logging with the specified level and import logger
+    from lfx.log.logger import configure, logger
+
+    configure(log_level=log_level)
+
     verbose_print = create_verbose_printer(verbose=verbose)

     # Validate input sources - exactly one must be provided
@@ -134,11 +139,11 @@ def serve_command(
     temp_file_to_cleanup = None

     if flow_json is not None:
-
+        logger.info("Processing inline JSON content...")
         try:
             # Validate JSON syntax
             json_data = json.loads(flow_json)
-
+            logger.info("JSON content is valid")

             # Create a temporary file with the JSON content
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
@@ -146,7 +151,7 @@ def serve_command(
                 temp_file_to_cleanup = temp_file.name

             script_path = temp_file_to_cleanup
-
+            logger.info(f"Created temporary file: {script_path}")

         except json.JSONDecodeError as e:
             typer.echo(f"Error: Invalid JSON content: {e}", err=True)
@@ -156,17 +161,17 @@ def serve_command(
             raise typer.Exit(1) from e

     elif stdin:
-
+        logger.info("Reading JSON content from stdin...")
         try:
             # Read all content from stdin
             stdin_content = sys.stdin.read().strip()
             if not stdin_content:
-
+                logger.error("No content received from stdin")
                 raise typer.Exit(1)

             # Validate JSON syntax
             json_data = json.loads(stdin_content)
-
+            logger.info("JSON content from stdin is valid")

             # Create a temporary file with the JSON content
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
@@ -174,7 +179,7 @@ def serve_command(
                 temp_file_to_cleanup = temp_file.name

             script_path = temp_file_to_cleanup
-
+            logger.info(f"Created temporary file from stdin: {script_path}")

         except json.JSONDecodeError as e:
             verbose_print(f"Error: Invalid JSON content from stdin: {e}")
@@ -210,10 +215,10 @@ def serve_command(
         raise typer.Exit(1)

     # Prepare the graph
-
+    logger.info("Preparing graph for serving...")
     try:
         graph.prepare()
-
+        logger.info("Graph prepared successfully")

         # Validate global variables for environment compatibility
         if check_variables:
@@ -221,12 +226,12 @@ def serve_command(

             validation_errors = validate_global_variables_for_env(graph)
             if validation_errors:
-
+                logger.error("Global variable validation failed:")
                 for error in validation_errors:
-
+                    logger.error(f"  - {error}")
                 raise typer.Exit(1)
         else:
-
+            logger.info("Global variable validation skipped")
     except Exception as e:
         verbose_print(f"✗ Failed to prepare graph: {e}")
         raise typer.Exit(1) from e
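The practical effect of these hunks: `lfx serve` now emits its progress and validation messages through the shared structlog-based logger, gated by the `--log-level` option, rather than only through the verbose printer. A minimal sketch of the pattern (standalone; "DEBUG" stands in for whatever value the CLI passes):

import sys  # noqa: F401  (kept for parity with the CLI module)

from lfx.log.logger import configure, logger

# Configure once at command start; records below the configured level are dropped.
configure(log_level="DEBUG")
logger.info("Processing inline JSON content...")
logger.error("Global variable validation failed:")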
lfx/cli/run.py
CHANGED
@@ -18,18 +18,29 @@ from lfx.cli.validation import validate_global_variables_for_env
 from lfx.log.logger import logger
 from lfx.schema.schema import InputValueRequest

+# Verbosity level constants
+VERBOSITY_DETAILED = 2
+VERBOSITY_FULL = 3

-
+
+def output_error(error_message: str, *, verbose: bool, exception: Exception | None = None) -> None:
     """Output error in JSON format to stdout when not verbose, or to stderr when verbose."""
     if verbose:
         typer.echo(f"{error_message}", file=sys.stderr)
+
+    error_response = {
+        "success": False,
+        "type": "error",
+    }
+
+    # Add clean exception data if available
+    if exception:
+        error_response["exception_type"] = type(exception).__name__
+        error_response["exception_message"] = str(exception)
     else:
-        error_response =
-
-
-        "type": "error",
-        }
-        typer.echo(json.dumps(error_response))
+        error_response["exception_message"] = error_message
+
+    typer.echo(json.dumps(error_response))


 @partial(syncify, raise_sync_error=False)
@@ -67,9 +78,20 @@ async def run(
         help="Check global variables for environment compatibility",
     ),
     verbose: bool = typer.Option(
-
-
-
+        False,  # noqa: FBT003
+        "-v",
+        "--verbose",
+        help="Show basic progress information",
+    ),
+    verbose_detailed: bool = typer.Option(
+        False,  # noqa: FBT003
+        "-vv",
+        help="Show detailed progress and debug information",
+    ),
+    verbose_full: bool = typer.Option(
+        False,  # noqa: FBT003
+        "-vvv",
+        help="Show full debugging output including component logs",
     ),
     timing: bool = typer.Option(
         default=False,
@@ -88,6 +110,8 @@ async def run(
         input_value: Input value to pass to the graph (positional argument)
         input_value_option: Input value to pass to the graph (alternative option)
         verbose: Show diagnostic output and execution details
+        verbose_detailed: Show detailed progress and debug information (-vv)
+        verbose_full: Show full debugging output including component logs (-vvv)
         output_format: Format for output (json, text, message, or result)
         flow_json: Inline JSON flow content as a string
         stdin: Read JSON flow content from stdin
@@ -96,15 +120,22 @@ async def run(
     """
     # Start timing if requested
     import time
-    from datetime import datetime
-
-    def verbose_print(message: str, level: str = "INFO") -> None:
-        if verbose:
-            timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]  # Include milliseconds  # noqa: DTZ005
-            typer.echo(f"[{timestamp}] {level}: {message}", file=sys.stderr)

-
-
+    # Configure logger based on verbosity level
+    from lfx.log.logger import configure
+
+    if verbose_full:
+        configure(log_level="DEBUG", output_file=sys.stderr)  # Show everything including component debug logs
+        verbosity = 3
+    elif verbose_detailed:
+        configure(log_level="DEBUG", output_file=sys.stderr)  # Show debug and above
+        verbosity = 2
+    elif verbose:
+        configure(log_level="INFO", output_file=sys.stderr)  # Show info and above including our CLI info messages
+        verbosity = 1
+    else:
+        configure(log_level="CRITICAL", output_file=sys.stderr)  # Only critical errors
+        verbosity = 0

     start_time = time.time() if timing else None

@@ -127,15 +158,18 @@ async def run(
     temp_file_to_cleanup = None

     if flow_json is not None:
-
+        if verbosity > 0:
+            typer.echo("Processing inline JSON content...", file=sys.stderr)
         try:
             json_data = json.loads(flow_json)
-
+            if verbosity > 0:
+                typer.echo("JSON content is valid", file=sys.stderr)
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
                 json.dump(json_data, temp_file, indent=2)
                 temp_file_to_cleanup = temp_file.name
             script_path = Path(temp_file_to_cleanup)
-
+            if verbosity > 0:
+                typer.echo(f"Created temporary file: {script_path}", file=sys.stderr)
         except json.JSONDecodeError as e:
             output_error(f"Invalid JSON content: {e}", verbose=verbose)
             raise typer.Exit(1) from e
@@ -143,19 +177,22 @@ async def run(
             output_error(f"Error processing JSON content: {e}", verbose=verbose)
             raise typer.Exit(1) from e
     elif stdin:
-
+        if verbosity > 0:
+            typer.echo("Reading JSON content from stdin...", file=sys.stderr)
         try:
             stdin_content = sys.stdin.read().strip()
             if not stdin_content:
                 output_error("No content received from stdin", verbose=verbose)
                 raise typer.Exit(1)
             json_data = json.loads(stdin_content)
-
+            if verbosity > 0:
+                typer.echo("JSON content from stdin is valid", file=sys.stderr)
             with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as temp_file:
                 json.dump(json_data, temp_file, indent=2)
                 temp_file_to_cleanup = temp_file.name
             script_path = Path(temp_file_to_cleanup)
-
+            if verbosity > 0:
+                typer.echo(f"Created temporary file from stdin: {script_path}", file=sys.stderr)
         except json.JSONDecodeError as e:
             output_error(f"Invalid JSON content from stdin: {e}", verbose=verbose)
             raise typer.Exit(1) from e
@@ -175,7 +212,8 @@ async def run(
         error_msg = f"'{script_path}' must be a .py or .json file."
         raise ValueError(error_msg)
     file_type = "Python script" if file_extension == ".py" else "JSON flow"
-
+    if verbosity > 0:
+        typer.echo(f"Analyzing {file_type}: {script_path}", file=sys.stderr)
     if file_extension == ".py":
         graph_info = find_graph_variable(script_path)
         if not graph_info:
@@ -183,47 +221,44 @@ async def run(
                 "No 'graph' variable found in the script. Expected to find an assignment like: graph = Graph(...)"
             )
             raise ValueError(error_msg)
-
-
-
-
+        if verbosity > 0:
+            typer.echo(f"Found 'graph' variable at line {graph_info['line_number']}", file=sys.stderr)
+            typer.echo(f"Type: {graph_info['type']}", file=sys.stderr)
+            typer.echo(f"Source: {graph_info['source_line']}", file=sys.stderr)
+            typer.echo("Loading and executing script...", file=sys.stderr)
         graph = load_graph_from_script(script_path)
     elif file_extension == ".json":
-
-
+        if verbosity > 0:
+            typer.echo("Valid JSON flow file detected", file=sys.stderr)
+            typer.echo("Loading and executing JSON flow", file=sys.stderr)
         from lfx.load import aload_flow_from_json

         graph = await aload_flow_from_json(script_path, disable_logs=not verbose)
     except Exception as e:
         error_type = type(e).__name__
-
-
-        if verbose:
-            # Enhanced error context for better debugging
-            debug_print(f"Exception type: {error_type}")
-            debug_print(f"Exception message: {e!s}")
+        logger.error(f"Graph loading failed with {error_type}")

+        if verbosity > 0:
             # Try to identify common error patterns
             if "ModuleNotFoundError" in str(e) or "No module named" in str(e):
-
+                logger.info("This appears to be a missing dependency issue")
                 if "langchain" in str(e).lower():
-
+                    logger.info(
                         "Missing LangChain dependency detected. Try: pip install langchain-<provider>",
-                        level="WARN",
                     )
             elif "ImportError" in str(e):
-
+                logger.info("This appears to be an import issue - check component dependencies")
             elif "AttributeError" in str(e):
-
+                logger.info("This appears to be a component configuration issue")

             # Show full traceback in debug mode
-            logger.exception("Failed to load graph
+            logger.exception("Failed to load graph.")

-        output_error(f"Failed to load graph
+        output_error(f"Failed to load graph. {e}", verbose=verbose, exception=e)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
         raise typer.Exit(1) from e
@@ -233,70 +268,70 @@ async def run(
     # Mark end of loading phase if timing
     load_end_time = time.time() if timing else None

-
+    if verbosity > 0:
+        typer.echo("Preparing graph for execution...", file=sys.stderr)
     try:
         # Add detailed preparation steps
-        if
-
-
+        if verbosity > 0:
+            logger.debug(f"Graph contains {len(graph.vertices)} vertices")
+            logger.debug(f"Graph contains {len(graph.edges)} edges")

             # Show component types being used
             component_types = set()
             for vertex in graph.vertices:
                 if hasattr(vertex, "display_name"):
                     component_types.add(vertex.display_name)
-
+            logger.debug(f"Component types in graph: {', '.join(sorted(component_types))}")

         graph.prepare()
-
+        logger.info("Graph preparation completed")

         # Validate global variables for environment compatibility
         if check_variables:
-
+            logger.info("Validating global variables...")
             validation_errors = validate_global_variables_for_env(graph)
             if validation_errors:
                 error_details = "Global variable validation failed: " + "; ".join(validation_errors)
-
+                logger.info(f"Variable validation failed: {len(validation_errors)} errors")
                 for error in validation_errors:
-
+                    logger.debug(f"Validation error: {error}")
                 output_error(error_details, verbose=verbose)
                 if temp_file_to_cleanup:
                     try:
                         Path(temp_file_to_cleanup).unlink()
-
+                        logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
                     except OSError:
                         pass
                 if validation_errors:
                     raise typer.Exit(1)
-
+            logger.info("Global variable validation passed")
         else:
-
+            logger.info("Global variable validation skipped")
     except Exception as e:
         error_type = type(e).__name__
-
+        logger.info(f"Graph preparation failed with {error_type}")

-        if
-
+        if verbosity > 0:
+            logger.debug(f"Preparation error: {e!s}")
             logger.exception("Failed to prepare graph - full traceback:")

-        output_error(f"Failed to prepare graph: {e}", verbose=verbose)
+        output_error(f"Failed to prepare graph: {e}", verbose=verbose, exception=e)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
         raise typer.Exit(1) from e

-
+    logger.info("Executing graph...")
     execution_start_time = time.time() if timing else None
-
     if verbose:
-
+        logger.debug("Setting up execution environment")
         if inputs:
-
+            logger.debug(f"Input provided: {inputs.input_value}")
         else:
-
+            logger.debug("No input provided")

     captured_stdout = StringIO()
     captured_stderr = StringIO()
@@ -309,18 +344,20 @@ async def run(

     try:
         sys.stdout = captured_stdout
-
+        # Don't capture stderr at high verbosity levels to avoid duplication with direct logging
+        if verbosity < VERBOSITY_FULL:
+            sys.stderr = captured_stderr
         results = []

-
+        logger.info("Starting graph execution...", level="DEBUG")
         result_count = 0

         async for result in graph.async_start(inputs):
             result_count += 1
-            if
-
+            if verbosity > 0:
+                logger.debug(f"Processing result #{result_count}")
             if hasattr(result, "vertex") and hasattr(result.vertex, "display_name"):
-
+                logger.debug(f"Component: {result.vertex.display_name}")
             if timing:
                 step_end_time = time.time()
                 step_duration = step_end_time - execution_step_start
@@ -342,46 +379,70 @@ async def run(

             results.append(result)

-
+        logger.info(f"Graph execution completed. Processed {result_count} results")

     except Exception as e:
-        sys.stdout = original_stdout
-        sys.stderr = original_stderr
-
         error_type = type(e).__name__
-
+        logger.info(f"Graph execution failed with {error_type}")

-        if
-
-            debug_print(f"Failed after processing {result_count} results")
+        if verbosity >= VERBOSITY_DETAILED:  # Only show details at -vv and above
+            logger.debug(f"Failed after processing {result_count} results")

+        # Only show component output at maximum verbosity (-vvv)
+        if verbosity >= VERBOSITY_FULL:
             # Capture any output that was generated before the error
-
+            # Only show captured stdout since stderr logging is already shown directly in verbose mode
+            captured_content = captured_stdout.getvalue()
             if captured_content.strip():
-
-
-
+                # Check if captured content contains the same error that will be displayed at the end
+                error_text = str(e)
+                captured_lines = captured_content.strip().split("\n")
+
+                # Filter out lines that are duplicates of the final error message
+                unique_lines = [
+                    line
+                    for line in captured_lines
+                    if not any(
+                        error_part.strip() in line for error_part in error_text.split("\n") if error_part.strip()
+                    )
+                ]
+
+                if unique_lines:
+                    logger.info("Component output before error:", level="DEBUG")
+                    for line in unique_lines:
+                        # Log each line directly using the logger to avoid nested formatting
+                        if verbosity > 0:
+                            # Remove any existing timestamp prefix to avoid duplication
+                            clean_line = line
+                            if "] " in line and line.startswith("2025-"):
+                                # Extract just the log message after the timestamp and level
+                                parts = line.split("] ", 1)
+                                if len(parts) > 1:
+                                    clean_line = parts[1]
+                            logger.debug(clean_line)

         # Provide context about common execution errors
         if "list can't be used in 'await' expression" in str(e):
-
-
+            logger.info("This appears to be an async/await mismatch in a component")
+            logger.info("Check that async methods are properly awaited")
         elif "AttributeError" in error_type and "NoneType" in str(e):
-
-
+            logger.info("This appears to be a null reference error")
+            logger.info("A component may be receiving unexpected None values")
         elif "ConnectionError" in str(e) or "TimeoutError" in str(e):
-
-
+            logger.info("This appears to be a network connectivity issue")
+            logger.info("Check API keys and network connectivity")

         logger.exception("Failed to execute graph - full traceback:")

-        output_error(f"Failed to execute graph: {e}", verbose=verbose)
         if temp_file_to_cleanup:
             try:
                 Path(temp_file_to_cleanup).unlink()
-
+                logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
             except OSError:
                 pass
+        sys.stdout = original_stdout
+        sys.stderr = original_stderr
+        output_error(f"Failed to execute graph: {e}", verbose=verbosity > 0, exception=e)
         raise typer.Exit(1) from e
     finally:
         sys.stdout = original_stdout
@@ -389,7 +450,7 @@ async def run(
     if temp_file_to_cleanup:
         try:
             Path(temp_file_to_cleanup).unlink()
-
+            logger.info(f"Cleaned up temporary file: {temp_file_to_cleanup}")
         except OSError:
             pass

@@ -424,7 +485,7 @@ async def run(
             result_data["logs"] = captured_logs
         if timing_metadata:
             result_data["timing"] = timing_metadata
-        indent = 2 if
+        indent = 2 if verbosity > 0 else None
         typer.echo(json.dumps(result_data, indent=indent))
     elif output_format in {"text", "message"}:
         result_data = extract_structured_result(results)
@@ -437,5 +498,5 @@ async def run(
             result_data["logs"] = captured_logs
         if timing_metadata:
             result_data["timing"] = timing_metadata
-        indent = 2 if
+        indent = 2 if verbosity > 0 else None
         typer.echo(json.dumps(result_data, indent=indent))
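The three new flags form a verbosity ladder (-v < -vv < -vvv) that run() collapses into a single integer plus a logger configuration. A standalone sketch of that mapping, using only calls that appear in the hunks above (the helper name resolve_verbosity is illustrative; run.py inlines this logic):

import sys

from lfx.log.logger import configure


def resolve_verbosity(*, verbose: bool, verbose_detailed: bool, verbose_full: bool) -> int:
    """Map -v/-vv/-vvv onto a verbosity integer, configuring the logger as a side effect."""
    if verbose_full:  # -vvv: everything, including component debug logs
        configure(log_level="DEBUG", output_file=sys.stderr)
        return 3
    if verbose_detailed:  # -vv: debug and above
        configure(log_level="DEBUG", output_file=sys.stderr)
        return 2
    if verbose:  # -v: info and above
        configure(log_level="INFO", output_file=sys.stderr)
        return 1
    configure(log_level="CRITICAL", output_file=sys.stderr)  # quiet: critical only
    return 0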
lfx/components/deactivated/sub_flow.py
CHANGED
@@ -4,7 +4,7 @@ from lfx.base.flow_processing.utils import build_data_from_result_data
 from lfx.custom.custom_component.custom_component import CustomComponent
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers
+from lfx.helpers import get_flow_inputs
 from lfx.log.logger import logger
 from lfx.schema.data import Data
 from lfx.schema.dotdict import dotdict
lfx/components/logic/flow_tool.py
CHANGED
@@ -6,7 +6,7 @@ from lfx.base.langchain_utilities.model import LCToolComponent
 from lfx.base.tools.flow_tool import FlowTool
 from lfx.field_typing import Tool
 from lfx.graph.graph.base import Graph
-from lfx.helpers
+from lfx.helpers import get_flow_inputs
 from lfx.io import BoolInput, DropdownInput, Output, StrInput
 from lfx.log.logger import logger
 from lfx.schema.data import Data
lfx/components/logic/run_flow.py
CHANGED
lfx/components/logic/sub_flow.py
CHANGED
@@ -4,7 +4,7 @@ from lfx.base.flow_processing.utils import build_data_from_result_data
 from lfx.custom.custom_component.component import Component
 from lfx.graph.graph.base import Graph
 from lfx.graph.vertex.base import Vertex
-from lfx.helpers
+from lfx.helpers import get_flow_inputs
 from lfx.io import DropdownInput, Output
 from lfx.log.logger import logger
 from lfx.schema.data import Data
lfx/custom/custom_component/custom_component.py
CHANGED
@@ -12,7 +12,7 @@ from pydantic import BaseModel

 from lfx.custom import validate
 from lfx.custom.custom_component.base_component import BaseComponent
-from lfx.helpers
+from lfx.helpers import list_flows, load_flow, run_flow
 from lfx.log.logger import logger
 from lfx.schema.data import Data
 from lfx.services.deps import get_storage_service, get_variable_service, session_scope
lfx/graph/vertex/param_handler.py
CHANGED
@@ -161,7 +161,7 @@ class ParameterHandler:
         elif field.get("required"):
             field_display_name = field.get("display_name")
             logger.warning(
-                "File path not found for
+                "File path not found for %s in component %s. Setting to None.",
                 field_display_name,
                 self.vertex.display_name,
             )
@@ -255,7 +255,7 @@ class ParameterHandler:
             else:
                 params[field_name] = ast.literal_eval(val) if val else None
         except Exception:  # noqa: BLE001
-            logger.debug("Error evaluating code for
+            logger.debug("Error evaluating code for %s", field_name)
             params[field_name] = val
     return params
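The updated calls use %-style deferred formatting, where interpolation happens inside the logger and only for records that are actually emitted. A minimal sketch of the idiom, with a hypothetical field_name value:

from lfx.log.logger import logger

field_name = "code"  # hypothetical value for illustration

# Eager: the f-string is built even when DEBUG records are filtered out.
logger.debug(f"Error evaluating code for {field_name}")

# Deferred: the logger interpolates only if the record is emitted.
logger.debug("Error evaluating code for %s", field_name)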
lfx/helpers/__init__.py
CHANGED
@@ -1 +1,129 @@
-"""Helpers module for lfx package.
+"""Helpers module for the lfx package.
+
+This module automatically chooses between the full langflow implementation
+(when available) and the lfx implementation (when standalone).
+"""
+
+from lfx.utils.langflow_utils import has_langflow_memory
+
+# Import the appropriate implementation
+if has_langflow_memory():
+    try:
+        # Import full langflow implementation
+        # Base Model
+        from langflow.helpers.base_model import (
+            BaseModel,
+            SchemaField,
+            build_model_from_schema,
+            coalesce_bool,
+        )
+
+        # Custom
+        from langflow.helpers.custom import (
+            format_type,
+        )
+
+        # Data
+        from langflow.helpers.data import (
+            clean_string,
+            data_to_text,
+            data_to_text_list,
+            docs_to_data,
+            safe_convert,
+        )
+
+        # Flow
+        from langflow.helpers.flow import (
+            build_schema_from_inputs,
+            get_arg_names,
+            get_flow_inputs,
+            list_flows,
+            load_flow,
+            run_flow,
+        )
+    except ImportError:
+        # Fallback to lfx implementation if langflow import fails
+        # Base Model
+        from lfx.helpers.base_model import (
+            BaseModel,
+            SchemaField,
+            build_model_from_schema,
+            coalesce_bool,
+        )
+
+        # Custom
+        from lfx.helpers.custom import (
+            format_type,
+        )
+
+        # Data
+        from lfx.helpers.data import (
+            clean_string,
+            data_to_text,
+            data_to_text_list,
+            docs_to_data,
+            safe_convert,
+        )
+
+        # Flow
+        from lfx.helpers.flow import (
+            build_schema_from_inputs,
+            get_arg_names,
+            get_flow_inputs,
+            list_flows,
+            load_flow,
+            run_flow,
+        )
+else:
+    # Use lfx implementation
+    # Base Model
+    from lfx.helpers.base_model import (
+        BaseModel,
+        SchemaField,
+        build_model_from_schema,
+        coalesce_bool,
+    )
+
+    # Custom
+    from lfx.helpers.custom import (
+        format_type,
+    )
+
+    # Data
+    from lfx.helpers.data import (
+        clean_string,
+        data_to_text,
+        data_to_text_list,
+        docs_to_data,
+        safe_convert,
+    )
+
+    # Flow
+    from lfx.helpers.flow import (
+        build_schema_from_inputs,
+        get_arg_names,
+        get_flow_inputs,
+        list_flows,
+        load_flow,
+        run_flow,
+    )
+
+# Export the available functions
+__all__ = [
+    "BaseModel",
+    "SchemaField",
+    "build_model_from_schema",
+    "build_schema_from_inputs",
+    "clean_string",
+    "coalesce_bool",
+    "data_to_text",
+    "data_to_text_list",
+    "docs_to_data",
+    "format_type",
+    "get_arg_names",
+    "get_flow_inputs",
+    "list_flows",
+    "load_flow",
+    "run_flow",
+    "safe_convert",
+]
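From a caller's perspective the dispatch is invisible: the same import works whether or not the full langflow package is installed. A small usage sketch:

# lfx.helpers re-exports either langflow.helpers.* or lfx.helpers.* names,
# chosen once at import time via has_langflow_memory().
from lfx.helpers import get_flow_inputs, run_flow  # noqa: F401

# Which backing implementation answered can be inspected at runtime;
# this should print "langflow.helpers.flow" or "lfx.helpers.flow".
print(run_flow.__module__)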
lfx/helpers/flow.py
CHANGED
lfx/inputs/input_mixin.py
CHANGED
@@ -12,6 +12,7 @@ from pydantic import (

 from lfx.field_typing.range_spec import RangeSpec
 from lfx.inputs.validators import CoalesceBool
+from lfx.schema.cross_module import CrossModuleModel


 class FieldTypes(str, Enum):
@@ -42,7 +43,7 @@ SerializableFieldTypes = Annotated[FieldTypes, PlainSerializer(lambda v: v.value


 # Base mixin for common input field attributes and methods
-class BaseInputMixin(
+class BaseInputMixin(CrossModuleModel, validate_assignment=True):  # type: ignore[call-arg]
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
         extra="forbid",
lfx/inputs/inputs.py
CHANGED
@@ -190,24 +190,15 @@ class MessageInput(StrInput, InputTraceMixin):
         # If v is a instance of Message, then its fine
         if isinstance(v, dict):
             return Message(**v)
+        # Duck-typed Message check - works across module boundaries
         if isinstance(v, Message):
+            # If it's from a different module (e.g., langflow.schema.Message),
+            # convert it to ensure we have the right type
+            if type(v).__module__ != Message.__module__:
+                return Message(**v.model_dump())
             return v
-        # Check for Message-like objects by examining their fields
-        # This handles both langflow and lfx Message instances
-        if hasattr(v, "text") and hasattr(v, "model_dump") and callable(v.model_dump):
-            # Check if it has other Message-specific attributes
-            message_fields = {"text", "data", "sender", "session_id", "properties"}
-            obj_attrs = set(dir(v))
-            min_message_fields = 3
-            if len(message_fields.intersection(obj_attrs)) >= min_message_fields:
-                try:
-                    return Message(**v.model_dump())
-                except (TypeError, ValueError):
-                    # Fallback to text only if model_dump fails
-                    return Message(text=v.text)
         if isinstance(v, str | AsyncIterator | Iterator):
             return Message(text=v)
-        # For simplified implementation, we'll skip MessageBase handling
         msg = f"Invalid value type {type(v)}"
         raise ValueError(msg)
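The validator replaces the old attribute-counting heuristic with an exact rule: rely on the cross-module isinstance support (see lfx/schema/cross_module.py below) and rebuild only instances whose class lives in a different module. A condensed sketch of the normalization path shown above (coerce_message is an illustrative name, not the actual method):

from collections.abc import AsyncIterator, Iterator

from lfx.schema.message import Message


def coerce_message(v):
    """Sketch of MessageInput's message normalization."""
    if isinstance(v, dict):
        return Message(**v)
    if isinstance(v, Message):  # may also match a structurally identical twin class
        if type(v).__module__ != Message.__module__:
            # Foreign twin (e.g. langflow.schema.Message): round-trip through
            # model_dump() so downstream code holds the local type.
            return Message(**v.model_dump())
        return v
    if isinstance(v, str | AsyncIterator | Iterator):
        return Message(text=v)
    msg = f"Invalid value type {type(v)}"
    raise ValueError(msg)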
lfx/log/logger.py
CHANGED
@@ -209,6 +209,7 @@ def configure(
     log_format: str | None = None,
     log_rotation: str | None = None,
     cache: bool | None = None,
+    output_file=None,
 ) -> None:
     """Configure the logger."""
     # Early-exit only if structlog is configured AND current min level matches the requested one.
@@ -297,11 +298,14 @@ def configure(
     wrapper_class.min_level = numeric_level

     # Configure structlog
+    # Default to stdout for backward compatibility, unless output_file is specified
+    log_output_file = output_file if output_file is not None else sys.stdout
+
     structlog.configure(
         processors=processors,
         wrapper_class=wrapper_class,
         context_class=dict,
-        logger_factory=structlog.PrintLoggerFactory(file=
+        logger_factory=structlog.PrintLoggerFactory(file=log_output_file)
         if not log_file
         else structlog.stdlib.LoggerFactory(),
         cache_logger_on_first_use=cache if cache is not None else True,
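The new output_file parameter exists so CLI commands can push log records to stderr and keep stdout clean for machine-readable results; when it is omitted the print factory still defaults to stdout, so existing callers are unaffected. Usage as it appears in run.py:

import sys

from lfx.log.logger import configure, logger

# Route log records to stderr; JSON results printed on stdout stay parseable.
configure(log_level="DEBUG", output_file=sys.stderr)
logger.debug("this record goes to stderr")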
lfx/memory/__init__.py
CHANGED
@@ -1,35 +1,15 @@
 """Memory management for lfx with dynamic loading.

-This module automatically chooses between full langflow
-(when available) and lfx
+This module automatically chooses between the full langflow implementation
+(when available) and the lfx implementation (when standalone).
 """

-import
+from lfx.utils.langflow_utils import has_langflow_memory

-
-
-
-def _has_langflow_memory():
-    """Check if langflow.memory with database support is available."""
-    try:
-        # Check if langflow.memory and MessageTable are available
-        return importlib.util.find_spec("langflow") is not None
-    except (ImportError, ModuleNotFoundError):
-        pass
-    except Exception as e:  # noqa: BLE001
-        logger.error(f"Error checking for langflow.memory: {e}")
-    return False
-
-
-#### TODO: This _LANGFLOW_AVAILABLE implementation should be changed later ####
-# Consider refactoring to lazy loading or a more robust service discovery mechanism
-# that can handle runtime availability changes.
-_LANGFLOW_AVAILABLE = _has_langflow_memory()
-
-# Import the appropriate implementations
-if _LANGFLOW_AVAILABLE:
+# Import the appropriate implementation
+if has_langflow_memory():
     try:
-        # Import
+        # Import full langflow implementation
         from langflow.memory import (
             aadd_messages,
             aadd_messagetables,
@@ -43,8 +23,8 @@ if _LANGFLOW_AVAILABLE:
             get_messages,
             store_message,
         )
-    except
-        #
+    except ImportError:
+        # Fallback to lfx implementation if langflow import fails
         from lfx.memory.stubs import (
             aadd_messages,
             aadd_messagetables,
@@ -59,7 +39,7 @@ if _LANGFLOW_AVAILABLE:
             store_message,
         )
 else:
-    # Use lfx
+    # Use lfx implementation
     from lfx.memory.stubs import (
         aadd_messages,
         aadd_messagetables,
@@ -74,7 +54,7 @@ else:
         store_message,
     )

-    # Export the available functions
+# Export the available functions
 __all__ = [
     "aadd_messages",
     "aadd_messagetables",
lfx/schema/cross_module.py
ADDED
@@ -0,0 +1,80 @@
+"""Cross-module BaseModel for handling re-exported classes.
+
+This module provides a metaclass and base model that enable isinstance checks
+to work across module boundaries for Pydantic models. This is particularly useful
+when the same class is re-exported from different modules (e.g., lfx.Message vs
+langflow.schema.Message) but Python's isinstance() checks fail due to different
+module paths.
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class CrossModuleMeta(type(BaseModel)):  # type: ignore[misc]
+    """Metaclass that enables cross-module isinstance checks for Pydantic models.
+
+    This metaclass overrides __instancecheck__ to perform structural type checking
+    based on the model's fields rather than strict class identity. This allows
+    instances of the same model from different module paths to be recognized as
+    compatible.
+    """
+
+    def __instancecheck__(cls, instance: Any) -> bool:
+        """Check if instance is compatible with this class across module boundaries.
+
+        First performs a standard isinstance check. If that fails, falls back to
+        checking if the instance has all required Pydantic model attributes and
+        a compatible set of model fields.
+
+        Args:
+            instance: The object to check.
+
+        Returns:
+            bool: True if instance is compatible with this class.
+        """
+        # First try standard isinstance check
+        if type.__instancecheck__(cls, instance):
+            return True
+
+        # If that fails, check for cross-module compatibility
+        # An object is cross-module compatible if it:
+        # 1. Has model_fields attribute (is a Pydantic model)
+        # 2. Has the same __class__.__name__
+        # 3. Has compatible model fields
+        if not hasattr(instance, "model_fields"):
+            return False
+
+        # Check if class names match
+        if instance.__class__.__name__ != cls.__name__:
+            return False
+
+        # Check if the instance has all required fields from cls
+        cls_fields = set(cls.model_fields.keys()) if hasattr(cls, "model_fields") else set()
+        instance_fields = set(instance.model_fields.keys())
+
+        # The instance must have at least the same fields as the class
+        # (it can have more, but not fewer required fields)
+        return cls_fields.issubset(instance_fields)
+
+
+class CrossModuleModel(BaseModel, metaclass=CrossModuleMeta):
+    """Base Pydantic model with cross-module isinstance support.
+
+    This class should be used as the base for models that may be re-exported
+    from different modules. It enables isinstance() checks to work across
+    module boundaries by using structural type checking.
+
+    Example:
+        >>> class Message(CrossModuleModel):
+        ...     text: str
+        ...
+        >>> # Even if Message is imported from different paths:
+        >>> from lfx.schema.message import Message as LfxMessage
+        >>> from langflow.schema import Message as LangflowMessage
+        >>> msg = LfxMessage(text="hello")
+        >>> isinstance(msg, LangflowMessage)  # True (with cross-module support)
+    """
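The structural fallback can be exercised without installing two packages by declaring two same-named, same-shaped models; the class names below are throwaway stand-ins, not real lfx/langflow imports:

from lfx.schema.cross_module import CrossModuleModel


class Message(CrossModuleModel):  # stand-in for one module's Message
    text: str


class _OtherModule:
    class Message(CrossModuleModel):  # structurally identical twin from "elsewhere"
        text: str


msg = _OtherModule.Message(text="hello")
print(isinstance(msg, Message))  # True: same class name, compatible fields
print(type(msg) is Message)      # False: they really are different classes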
lfx/schema/data.py
CHANGED
@@ -14,6 +14,7 @@ from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
 from pydantic import BaseModel, ConfigDict, model_serializer, model_validator

 from lfx.log.logger import logger
+from lfx.schema.cross_module import CrossModuleModel
 from lfx.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER
 from lfx.utils.image import create_image_content_dict

@@ -22,7 +23,7 @@ if TYPE_CHECKING:
     from lfx.schema.message import Message


-class Data(
+class Data(CrossModuleModel):
     """Represents a record with text and optional data.

     Attributes:
lfx/utils/langflow_utils.py
ADDED
@@ -0,0 +1,52 @@
+"""Langflow environment utility functions."""
+
+import importlib.util
+
+from lfx.log.logger import logger
+
+
+class _LangflowModule:
+    # Static variable
+    # Tri-state:
+    #   - None: Langflow check not performed yet
+    #   - True: Langflow is available
+    #   - False: Langflow is not available
+    _available = None
+
+    @classmethod
+    def is_available(cls):
+        return cls._available
+
+    @classmethod
+    def set_available(cls, value):
+        cls._available = value
+
+
+def has_langflow_memory():
+    """Check if langflow.memory (with database support) and MessageTable are available."""
+    # TODO: REVISIT: Optimize this implementation later
+    # - Consider refactoring to use lazy loading or a more robust service discovery mechanism
+    #   that can handle runtime availability changes.
+
+    # Use cached check from previous invocation (if applicable)
+
+    is_langflow_available = _LangflowModule.is_available()
+
+    if is_langflow_available is not None:
+        return is_langflow_available
+
+    # First check (lazy load and cache check)
+
+    module_spec = None
+
+    try:
+        module_spec = importlib.util.find_spec("langflow")
+    except ImportError:
+        pass
+    except (TypeError, ValueError) as e:
+        logger.error(f"Error encountered checking for langflow.memory: {e}")
+
+    is_langflow_available = module_spec is not None
+    _LangflowModule.set_available(is_langflow_available)
+
+    return is_langflow_available
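Because the result is memoized in _LangflowModule, the importlib probe runs at most once per process; every later call returns the cached boolean. Typical use, as in lfx/helpers and lfx/memory above:

from lfx.utils.langflow_utils import has_langflow_memory

# First call probes importlib.util.find_spec("langflow") and caches the result;
# subsequent calls skip the import machinery entirely.
if has_langflow_memory():
    print("full langflow implementation available")
else:
    print("standalone: using lfx stubs")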
lfx_nightly-0.1.12.dev35.dist-info/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lfx-nightly
-Version: 0.1.12.dev34
+Version: 0.1.12.dev35
 Summary: Langflow Executor - A lightweight CLI tool for executing and serving Langflow AI flows
 Author-email: Gabriel Luiz Freitas Almeida <gabriel@langflow.org>
 Requires-Python: <3.14,>=3.10
lfx_nightly-0.1.12.dev35.dist-info/RECORD
CHANGED
@@ -81,16 +81,16 @@ lfx/base/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lfx/base/tools/base.py,sha256=CMYJzYMoJoAeN9XVDRIKLfhHZO_WMM0wFsRHQQ2ommc,940
 lfx/base/tools/component_tool.py,sha256=WXc2is91CzcXWzzs5oAPaa0Rb_MpOhuzZTDDmfyoCwY,13490
 lfx/base/tools/constants.py,sha256=AgulV7M3axHeTKQOmls-9Z1C7pTfh6Er1qahtFS2am4,1535
-lfx/base/tools/flow_tool.py,sha256=
-lfx/base/tools/run_flow.py,sha256=
+lfx/base/tools/flow_tool.py,sha256=_NbedxYUPApBPza4uoRDeLSggLcjVLd_Z1Z9_Z7TDi4,4847
+lfx/base/tools/run_flow.py,sha256=2UD7MZMTla-cIdr5nGr6lp8pj6_iZgZtwvXcnLhXqas,9204
 lfx/base/vectorstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lfx/base/vectorstores/model.py,sha256=pDAZ6D6XnMxGAV9hJjc3DYhjI9n77sc_FIs5lnpsDbU,6932
 lfx/base/vectorstores/utils.py,sha256=OhBNYs9Z9poe82rTNFPdrESNRGuP6RO6-eOpwqJLBG0,750
 lfx/base/vectorstores/vector_store_connection_decorator.py,sha256=2gh3DMhcMsCgVYFEFaVNMT3zsbd-fkFy5Bl_-jXDu8c,1998
 lfx/cli/__init__.py,sha256=Oy17zrnwBlwJn80sMGyRJXos2n2eQGvSsh9CS_-v2R4,113
-lfx/cli/commands.py,sha256=
+lfx/cli/commands.py,sha256=IR4leAtfQUjsc9wI6q1LRx--u4sW-i5ixtKdMYju-hI,11912
 lfx/cli/common.py,sha256=SvszBhWoOttM27rButhBOpvE8lO5UGMYL0NaG_OW8bc,22060
-lfx/cli/run.py,sha256=
+lfx/cli/run.py,sha256=_7JX6YpoO3XLCfD2BtZl3FsghMyo18B9SriLyO0EaoQ,21411
 lfx/cli/script_loader.py,sha256=xWSpx57cBeX0UHmUgAk97aye9-hhD2Y6nKh68A-xaTA,8997
 lfx/cli/serve_app.py,sha256=3U0QsoCkf-1DxSpxfNOr8ap7Giaxm_MfuLrii5GpIHM,22485
 lfx/cli/validation.py,sha256=xZfL-rKt_Y-Lek19GMZffyxhPIyYMQHBIpR0Hwa_Ji8,2615
@@ -253,7 +253,7 @@ lfx/components/deactivated/selective_passthrough.py,sha256=YQN7iyEvm-T_YIAYD1Bmz
 lfx/components/deactivated/should_run_next.py,sha256=xcSscsWs7Tte-nbZEpOxLrnu9CaUy98TBY70VPY0_WE,1613
 lfx/components/deactivated/split_text.py,sha256=i_DFal5-j10k_56A7pA7F86oQ2lTMFt2W6aKsmjD_Ks,2005
 lfx/components/deactivated/store_message.py,sha256=k_F6FiyVU-HWHulbKPgjrdh_Dg9g_H3E_H_RvrGSmsA,701
-lfx/components/deactivated/sub_flow.py,sha256=
+lfx/components/deactivated/sub_flow.py,sha256=xag1QeRaL0Ph3eqzWbgJtmK8q22kVyMdRyIq--vyWFw,4790
 lfx/components/deactivated/vectara_self_query.py,sha256=nlRFL-FIdecgpaR70ohC8TslIN44oNVUpOx1nScGq9k,2952
 lfx/components/deactivated/vector_store.py,sha256=7L1Z8Nl0RZjEGAj1O2tMhb6I6jUNNc5MBOui4a2FkXE,728
 lfx/components/deepseek/__init__.py,sha256=gmyOcLeNEcnwSeowow0N0UhBDlSuZ_8x-DMUjwkNRFM,935
@@ -371,14 +371,14 @@ lfx/components/lmstudio/lmstudiomodel.py,sha256=73bEJ2CgqsnoYWhtqNq2Fpe9yedHK9ud
 lfx/components/logic/__init__.py,sha256=nHxJDynHNaHDhdckwa8Y6UCyjlsoO0QcNaSPq51OuUM,1802
 lfx/components/logic/conditional_router.py,sha256=RQaoM9FF63vXw6rebKA_j4-Hl2YRNvHRtwEq5eT48yY,8692
 lfx/components/logic/data_conditional_router.py,sha256=b6G_QWajQqoFCQM-614QbrPoU2AVzkgMHA6AMUZybl0,5054
-lfx/components/logic/flow_tool.py,sha256=
+lfx/components/logic/flow_tool.py,sha256=k0jXnRn0TIarE7cw61w80R-a_XmloRTIHioYGeZrBeU,3984
 lfx/components/logic/listen.py,sha256=k_wRN3yW5xtG1CjTdGYhL5LxdgCZ0Bi9cbWP54FkyuY,935
 lfx/components/logic/llm_conditional_router.py,sha256=x-qCphrRd16yh_n2IQxqoCWu4AMMlI1QNLKBA0r7Rz8,16613
 lfx/components/logic/loop.py,sha256=F9vGbfAH-zDQgnJpVy9yk4fdrSIXz1gomnAOYW71Gto,4682
 lfx/components/logic/notify.py,sha256=A9aLooUwudRUsf2BRdE7CmGibCCRuQeCadneart9BEg,3086
 lfx/components/logic/pass_message.py,sha256=BNPh7TOQ-svrhR2-uMQMMT0LBW0sT_zzIpbuWeEEPDY,1085
-lfx/components/logic/run_flow.py,sha256=
-lfx/components/logic/sub_flow.py,sha256=
+lfx/components/logic/run_flow.py,sha256=DA08G-LYKmRr4srpLpfqma8iVUBdPEWYlOFgUrv6TDU,2914
+lfx/components/logic/sub_flow.py,sha256=i6b7xco678C9rUeShH7syIJlGXUL2XUk4-AWkOO3wd8,4551
 lfx/components/maritalk/__init__.py,sha256=7S6PYMe6VnaPE1BvQAnzJ2lGmSYsnTSj79biKsRYwrA,951
 lfx/components/maritalk/maritalk.py,sha256=JcXaMRgerniZCWKTEAYjORpvTp1b5mapyQO76F3A5oo,1813
 lfx/components/mem0/__init__.py,sha256=NwL5GN-YdRPXZU5Y5zd9hpTbBx6cPjVKG_8wwIGUiNs,85
@@ -565,7 +565,7 @@ lfx/custom/custom_component/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 lfx/custom/custom_component/base_component.py,sha256=Pxi-qCocrGIwcG0x5fu-7ty1Py71bl_KG9Fku5SeO_M,4053
 lfx/custom/custom_component/component.py,sha256=6BIJxp6M8y1wCVUTGoQveYVDzeMDnuqcItcJpua_UUk,75795
 lfx/custom/custom_component/component_with_cache.py,sha256=por6CiPL3EHdLp_DvfI7qz1n4tc1KkqMOJNbsxoqVaI,313
-lfx/custom/custom_component/custom_component.py,sha256=
+lfx/custom/custom_component/custom_component.py,sha256=801FjGiQk7M7GD3CbU19AmCVS5KZjPVcKUy_wBXnm0o,22301
 lfx/custom/directory_reader/__init__.py,sha256=eFjlhKjpt2Kha_sJ2EqWofLRbpvfOTjvDSCpdpaTqWk,77
 lfx/custom/directory_reader/directory_reader.py,sha256=nuv9vxCjmOiVtT0pad0Xcz8mHfQ5ve0EvBWR6RokwdM,15269
 lfx/custom/directory_reader/utils.py,sha256=etts9VysmfP0kkbxn76shqLURPYZizF2YvEc4KeGPY4,6532
@@ -597,19 +597,19 @@ lfx/graph/vertex/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 lfx/graph/vertex/base.py,sha256=drvm5o-y2NMyZ99uupHqjh4lUl8kIV4SeYol-q7FTdQ,33310
 lfx/graph/vertex/constants.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lfx/graph/vertex/exceptions.py,sha256=QTe-7TRCI0TXswRZh1kh0Z3KySjQsJgY5zTU6o0jboQ,193
-lfx/graph/vertex/param_handler.py,sha256=
+lfx/graph/vertex/param_handler.py,sha256=o6RmGvdo8rzasIZaalkAvC3LJA7oS8ETqpmXnOtarI8,12887
 lfx/graph/vertex/schema.py,sha256=3h6c7TTdZI1mzfAebhD-6CCCRLu1mQ8UDmgJijx5zWg,552
 lfx/graph/vertex/utils.py,sha256=iJmY4PXta5dYWTX2SMEbpfMKrzwkJVQmi8qstSv8D7I,738
 lfx/graph/vertex/vertex_types.py,sha256=r4eMlTUTWR5CQf-W34BBzpHpQ9KdWcK3L-2byE22enU,21140
-lfx/helpers/__init__.py,sha256=
+lfx/helpers/__init__.py,sha256=XUsPqOxQNoleWGC4EtVxqgaEQjR4cHsHOsyJ3Kjm88I,2912
 lfx/helpers/base_model.py,sha256=EiBdNJVE83BNKsg-IedyZYd78Mbl0m7BN2XTFeTlBhw,1956
 lfx/helpers/custom.py,sha256=9Z6rVfIrih27qsGkV1lzVpkK-ifpQuOaGSUog_w4asM,306
 lfx/helpers/data.py,sha256=1jGqlVrLEgr0M5J_wJf-vZ-IRTV13Jm5W6wuxYL2Hdg,6004
-lfx/helpers/flow.py,sha256=
+lfx/helpers/flow.py,sha256=U5H-b7g6-12YutZF8DNIAmVPRqjI6P0TFntfCIzl46A,5653
 lfx/inputs/__init__.py,sha256=NeCBGqtJN7KGPpMmt6c4-840X9uafzYZGwXzkJ4bGqw,1213
 lfx/inputs/constants.py,sha256=vG60oUv1xy5vrnZtU65jBCzb6oaOuiRDq1ucl_9bz70,47
-lfx/inputs/input_mixin.py,sha256=
-lfx/inputs/inputs.py,sha256=
+lfx/inputs/input_mixin.py,sha256=fRnx-0FjIG-Vf_BCaXFMysca5f9CvRVKYhsD6mtNLnY,10896
+lfx/inputs/inputs.py,sha256=jNZhr7Uud_YQ9gYBMrBeBbP-m2-n4pQQ1V-fp8eNFTc,25706
 lfx/inputs/validators.py,sha256=i_PyQHQUmNpeS-_jRJNNsP3WlTPMkCJk2iFmFt3_ijw,505
 lfx/interface/__init__.py,sha256=hlivcb8kMhU_V8VeXClNfz5fRyF-u5PZZMXkgu0U5a0,211
 lfx/interface/components.py,sha256=BotYhF246Ixm41AQb2aD5OJ7G8dIX_uE_55ZOrI4C70,20058
@@ -626,9 +626,9 @@ lfx/load/__init__.py,sha256=y35GBUhVTOsG3GzL5UVL-RNAsu0D7T8MVPrNXoDMx7U,224
 lfx/load/load.py,sha256=mpQG2RV2ZOysShEOguWKdnQI9TUub1Ds5j89ZbwiQhA,10451
 lfx/load/utils.py,sha256=qa8aoMLW-X8FO8xVz3YVHQwjTSJYbYr_AOQAAp3smlc,3705
 lfx/log/__init__.py,sha256=UATLSm1Fp9rVclAXP00LKQzzYKcaboVSuWNujlRR6P4,119
-lfx/log/logger.py,sha256=
+lfx/log/logger.py,sha256=UaUlWEwws7SVa24_9ZuPwRgefoatzRV7nnZV7YQZjwU,14238
 lfx/logging/__init__.py,sha256=X5tXF5e1hc62adprRPLtKeaqm8-tpl6loXsxbh9IO-Q,367
-lfx/memory/__init__.py,sha256=
+lfx/memory/__init__.py,sha256=s7nCNKlcwLfT6Z_cXbiYjvoXQXZ-H2GqK1qsAuKBV08,1815
 lfx/memory/stubs.py,sha256=kR6TRI2t6rPvA5Pja5XPC4yvKRBFBuJfdI0hJL8vfwU,9924
 lfx/processing/__init__.py,sha256=jERZg6it9mhOzrbTAt9YtakSNXPSjUXFh5MfKBN48wA,41
 lfx/processing/process.py,sha256=FSYjseEWEgfBxP4GDkfRVVSyrvXwyIb7U0pTVc1gV_w,9252
@@ -637,7 +637,8 @@ lfx/schema/__init__.py,sha256=PyFfbQryedA67VjrTbJMuGZaau1zHjVZ1YfZO4J8OW4,1656
 lfx/schema/artifact.py,sha256=ooWPuiolxsRI9jeTSSQecYTb8vyIaGwPF8C1udZ5kYo,2604
 lfx/schema/content_block.py,sha256=DBjj6O4UaV2bkzz7vcGP7-ZuLREl5O413LUfAz8bIbs,2042
 lfx/schema/content_types.py,sha256=Towg01dGR9PMC7JrVkzfIPYMMJttl16XC6BvQXH03bw,2390
-lfx/schema/
+lfx/schema/cross_module.py,sha256=GiGYnzNHprKrxysf5hJxvvmy0GA0xxO_d3K1rUPGiA8,3148
+lfx/schema/data.py,sha256=g1E3Tjd-o42xwAfvWGj_exV5zB70WaOv-6seE9I0Rzk,11726
 lfx/schema/dataframe.py,sha256=Jo07krf3lI-dicAu5Y5Enf90NSEWkpD_di0rSj9krIM,7477
 lfx/schema/dotdict.py,sha256=d6R5jv8V_pxaQUX3QP41ZzTz2wZpnZ0OFsylFf3xL-Q,2756
 lfx/schema/encoders.py,sha256=7vlWHZnZuDv1UVuP9X7Xn8srP1HZqLygOmkps3EJyY0,332
@@ -713,13 +714,14 @@ lfx/utils/data_structure.py,sha256=xU3JNa_4jcGOVa_ctfMxiImEj6dKQQPE_zZsTAyy2T4,6
 lfx/utils/exceptions.py,sha256=RgIkI4uBssJsJUnuhluNGDSzdcuW5fnxPLhGfXYU9Uc,973
 lfx/utils/helpers.py,sha256=0LE0barnVp-8Y5cCoDRzhDzesvXqgiT7IXP6vtTSyGE,889
 lfx/utils/image.py,sha256=W9boQgz4WH3GOgLrYaRDz2CbX5Za8pzi044X3EKvYbI,2370
+lfx/utils/langflow_utils.py,sha256=JHCsYGAvpwXOhe8DrqFF08cYKGdjsz5_iA7glJDdEiY,1441
 lfx/utils/lazy_load.py,sha256=UDtXi8N7NT9r-FRGxsLUfDtGU_X8yqt-RQqgpc9TqAw,394
 lfx/utils/request_utils.py,sha256=A6vmwpr7f3ZUxHg6Sz2-BdUUsyAwg84-7N_DNoPC8_Q,518
 lfx/utils/schemas.py,sha256=NbOtVQBrn4d0BAu-0H_eCTZI2CXkKZlRY37XCSmuJwc,3865
 lfx/utils/util.py,sha256=Ww85wbr1-vjh2pXVtmTqoUVr6MXAW8S7eDx_Ys6HpE8,20696
 lfx/utils/util_strings.py,sha256=nU_IcdphNaj6bAPbjeL-c1cInQPfTBit8mp5Y57lwQk,1686
 lfx/utils/version.py,sha256=cHpbO0OJD2JQAvVaTH_6ibYeFbHJV0QDHs_YXXZ-bT8,671
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
-lfx_nightly-0.1.12.
+lfx_nightly-0.1.12.dev35.dist-info/METADATA,sha256=15fUNw16xD_P0VF15znV4v47_8SRS1cEOAY2R05g-Fg,8290
+lfx_nightly-0.1.12.dev35.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lfx_nightly-0.1.12.dev35.dist-info/entry_points.txt,sha256=1724p3RHDQRT2CKx_QRzEIa7sFuSVO0Ux70YfXfoMT4,42
+lfx_nightly-0.1.12.dev35.dist-info/RECORD,,
lfx_nightly-0.1.12.dev35.dist-info/WHEEL
File without changes
lfx_nightly-0.1.12.dev35.dist-info/entry_points.txt
File without changes