langflow-base-nightly 1.7.0.dev48__py3-none-any.whl → 1.7.0.dev50__py3-none-any.whl
This diff compares two publicly available versions of the package as released to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
- langflow/initial_setup/starter_projects/News Aggregator.json +2 -2
- {langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/METADATA +2 -2
- {langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/RECORD +5 -5
- {langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/WHEEL +0 -0
- {langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/entry_points.txt +0 -0
langflow/initial_setup/starter_projects/News Aggregator.json
CHANGED

@@ -1925,7 +1925,7 @@
 "last_updated": "2025-09-30T16:16:26.172Z",
 "legacy": false,
 "metadata": {
-"code_hash": "
+"code_hash": "9fb8e92f7b34",
 "dependencies": {
 "dependencies": [
 {
@@ -2130,7 +2130,7 @@
 "show": true,
 "title_case": false,
 "type": "code",
-"value": "import json\nfrom collections.abc import AsyncIterator, Iterator\nfrom pathlib import Path\n\nimport orjson\nimport pandas as pd\nfrom fastapi import UploadFile\nfrom fastapi.encoders import jsonable_encoder\n\nfrom lfx.custom import Component\nfrom lfx.inputs import SortableListInput\nfrom lfx.io import BoolInput, DropdownInput, HandleInput, SecretStrInput, StrInput\nfrom lfx.schema import Data, DataFrame, Message\nfrom lfx.services.deps import get_settings_service, get_storage_service, session_scope\nfrom lfx.template.field.base import Output\n\n\nclass SaveToFileComponent(Component):\n    display_name = \"Write File\"\n    description = \"Save data to local file, AWS S3, or Google Drive in the selected format.\"\n    documentation: str = \"https://docs.langflow.org/write-file\"\n    icon = \"file-text\"\n    name = \"SaveToFile\"\n\n    # File format options for different storage types\n    LOCAL_DATA_FORMAT_CHOICES = [\"csv\", \"excel\", \"json\", \"markdown\"]\n    LOCAL_MESSAGE_FORMAT_CHOICES = [\"txt\", \"json\", \"markdown\"]\n    AWS_FORMAT_CHOICES = [\n        \"txt\",\n        \"json\",\n        \"csv\",\n        \"xml\",\n        \"html\",\n        \"md\",\n        \"yaml\",\n        \"log\",\n        \"tsv\",\n        \"jsonl\",\n        \"parquet\",\n        \"xlsx\",\n        \"zip\",\n    ]\n    GDRIVE_FORMAT_CHOICES = [\"txt\", \"json\", \"csv\", \"xlsx\", \"slides\", \"docs\", \"jpg\", \"mp3\"]\n\n    inputs = [\n        # Storage location selection\n        SortableListInput(\n            name=\"storage_location\",\n            display_name=\"Storage Location\",\n            placeholder=\"Select Location\",\n            info=\"Choose where to save the file.\",\n            options=[\n                {\"name\": \"Local\", \"icon\": \"hard-drive\"},\n                {\"name\": \"AWS\", \"icon\": \"Amazon\"},\n                {\"name\": \"Google Drive\", \"icon\": \"google\"},\n            ],\n            real_time_refresh=True,\n            limit=1,\n        ),\n        # Common inputs\n        HandleInput(\n            name=\"input\",\n            display_name=\"File Content\",\n            info=\"The input to save.\",\n            dynamic=True,\n            input_types=[\"Data\", \"DataFrame\", \"Message\"],\n            required=True,\n        ),\n        StrInput(\n            name=\"file_name\",\n            display_name=\"File Name\",\n            info=\"Name file will be saved as (without extension).\",\n            required=True,\n            show=False,\n            tool_mode=True,\n        ),\n        BoolInput(\n            name=\"append_mode\",\n            display_name=\"Append\",\n            info=(\n                \"Append to file if it exists (only for Local storage with plain text formats). \"\n            \"Not supported for cloud storage (AWS/Google Drive).\"\n            ),\n            value=False,\n            show=False,\n        ),\n        # Format inputs (dynamic based on storage location)\n        DropdownInput(\n            name=\"local_format\",\n            display_name=\"File Format\",\n            options=list(dict.fromkeys(LOCAL_DATA_FORMAT_CHOICES + LOCAL_MESSAGE_FORMAT_CHOICES)),\n            info=\"Select the file format for local storage.\",\n            value=\"json\",\n            show=False,\n        ),\n        DropdownInput(\n            name=\"aws_format\",\n            display_name=\"File Format\",\n            options=AWS_FORMAT_CHOICES,\n            info=\"Select the file format for AWS S3 storage.\",\n            value=\"txt\",\n            show=False,\n        ),\n        DropdownInput(\n            name=\"gdrive_format\",\n            display_name=\"File Format\",\n            options=GDRIVE_FORMAT_CHOICES,\n            info=\"Select the file format for Google Drive storage.\",\n            value=\"txt\",\n            show=False,\n        ),\n        # AWS S3 specific inputs\n        SecretStrInput(\n            name=\"aws_access_key_id\",\n            display_name=\"AWS Access Key ID\",\n            info=\"AWS Access key ID.\",\n            show=False,\n            advanced=True,\n        ),\n        SecretStrInput(\n            name=\"aws_secret_access_key\",\n            display_name=\"AWS Secret Key\",\n            info=\"AWS Secret Key.\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"bucket_name\",\n            display_name=\"S3 Bucket Name\",\n            info=\"Enter the name of the S3 bucket.\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"aws_region\",\n            display_name=\"AWS Region\",\n            info=\"AWS region (e.g., us-east-1, eu-west-1).\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"s3_prefix\",\n            display_name=\"S3 Prefix\",\n            info=\"Prefix for all files in S3.\",\n            show=False,\n            advanced=True,\n        ),\n        # Google Drive specific inputs\n        SecretStrInput(\n            name=\"service_account_key\",\n            display_name=\"GCP Credentials Secret Key\",\n            info=\"Your Google Cloud Platform service account JSON key as a secret string (complete JSON content).\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"folder_id\",\n            display_name=\"Google Drive Folder ID\",\n            info=(\n                \"The Google Drive folder ID where the file will be uploaded. \"\n                \"The folder must be shared with the service account email.\"\n            ),\n            required=True,\n            show=False,\n            advanced=True,\n        ),\n    ]\n\n    outputs = [Output(display_name=\"File Path\", name=\"message\", method=\"save_to_file\")]\n\n    def update_build_config(self, build_config, field_value, field_name=None):\n        \"\"\"Update build configuration to show/hide fields based on storage location selection.\"\"\"\n        if field_name != \"storage_location\":\n            return build_config\n\n        # Extract selected storage location\n        selected = [location[\"name\"] for location in field_value] if isinstance(field_value, list) else []\n\n        # Hide all dynamic fields first\n        dynamic_fields = [\n            \"file_name\",  # Common fields (input is always visible)\n            \"append_mode\",\n            \"local_format\",\n            \"aws_format\",\n            \"gdrive_format\",\n            \"aws_access_key_id\",\n            \"aws_secret_access_key\",\n            \"bucket_name\",\n            \"aws_region\",\n            \"s3_prefix\",\n            \"service_account_key\",\n            \"folder_id\",\n        ]\n\n        for f_name in dynamic_fields:\n            if f_name in build_config:\n                build_config[f_name][\"show\"] = False\n\n        # Show fields based on selected storage location\n        if len(selected) == 1:\n            location = selected[0]\n\n            # Show file_name when any storage location is selected\n            if \"file_name\" in build_config:\n                build_config[\"file_name\"][\"show\"] = True\n\n            # Show append_mode only for Local storage (not supported for cloud storage)\n            if \"append_mode\" in build_config:\n                build_config[\"append_mode\"][\"show\"] = location == \"Local\"\n\n            if location == \"Local\":\n                if \"local_format\" in build_config:\n                    build_config[\"local_format\"][\"show\"] = True\n\n            elif location == \"AWS\":\n                aws_fields = [\n                    \"aws_format\",\n                    \"aws_access_key_id\",\n                    \"aws_secret_access_key\",\n                    \"bucket_name\",\n                    \"aws_region\",\n                    \"s3_prefix\",\n                ]\n                for f_name in aws_fields:\n                    if f_name in build_config:\n                        build_config[f_name][\"show\"] = True\n\n            elif location == \"Google Drive\":\n                gdrive_fields = [\"gdrive_format\", \"service_account_key\", \"folder_id\"]\n                for f_name in gdrive_fields:\n                    if f_name in build_config:\n                        build_config[f_name][\"show\"] = True\n\n        return build_config\n\n    async def save_to_file(self) -> Message:\n        \"\"\"Save the input to a file and upload it, returning a confirmation message.\"\"\"\n        # Validate inputs\n        if not self.file_name:\n            msg = \"File name must be provided.\"\n            raise ValueError(msg)\n        if not self._get_input_type():\n            msg = \"Input type is not set.\"\n            raise ValueError(msg)\n\n        # Get selected storage location\n        storage_location = self._get_selected_storage_location()\n        if not storage_location:\n            msg = \"Storage location must be selected.\"\n            raise ValueError(msg)\n\n        # Route to appropriate save method based on storage location\n        if storage_location == \"Local\":\n            return await self._save_to_local()\n        if storage_location == \"AWS\":\n            return await self._save_to_aws()\n        if storage_location == \"Google Drive\":\n            return await self._save_to_google_drive()\n        msg = f\"Unsupported storage location: {storage_location}\"\n        raise ValueError(msg)\n\n    def _get_input_type(self) -> str:\n        \"\"\"Determine the input type based on the provided input.\"\"\"\n        # Use exact type checking (type() is) instead of isinstance() to avoid inheritance issues.\n        # Since Message inherits from Data, isinstance(message, Data) would return True for Message objects,\n        # causing Message inputs to be incorrectly identified as Data type.\n        if type(self.input) is DataFrame:\n            return \"DataFrame\"\n        if type(self.input) is Message:\n            return \"Message\"\n        if type(self.input) is Data:\n            return \"Data\"\n        msg = f\"Unsupported input type: {type(self.input)}\"\n        raise ValueError(msg)\n\n    def _get_default_format(self) -> str:\n        \"\"\"Return the default file format based on input type.\"\"\"\n        if self._get_input_type() == \"DataFrame\":\n            return \"csv\"\n        if self._get_input_type() == \"Data\":\n            return \"json\"\n        if self._get_input_type() == \"Message\":\n            return \"json\"\n        return \"json\"  # Fallback\n\n    def _adjust_file_path_with_format(self, path: Path, fmt: str) -> Path:\n        \"\"\"Adjust the file path to include the correct extension.\"\"\"\n        file_extension = path.suffix.lower().lstrip(\".\")\n        if fmt == \"excel\":\n            return Path(f\"{path}.xlsx\").expanduser() if file_extension not in [\"xlsx\", \"xls\"] else path\n        return Path(f\"{path}.{fmt}\").expanduser() if file_extension != fmt else path\n\n    def _is_plain_text_format(self, fmt: str) -> bool:\n        \"\"\"Check if a file format is plain text (supports appending).\"\"\"\n        plain_text_formats = [\"txt\", \"json\", \"markdown\", \"md\", \"csv\", \"xml\", \"html\", \"yaml\", \"log\", \"tsv\", \"jsonl\"]\n        return fmt.lower() in plain_text_formats\n\n    async def _upload_file(self, file_path: Path) -> None:\n        \"\"\"Upload the saved file using the upload_user_file service.\"\"\"\n        from langflow.api.v2.files import upload_user_file\n        from langflow.services.database.models.user.crud import get_user_by_id\n\n        # Ensure the file exists\n        if not file_path.exists():\n            msg = f\"File not found: {file_path}\"\n            raise FileNotFoundError(msg)\n\n        # Upload the file - always use append=False because the local file already contains\n        # the correct content (either new or appended locally)\n        with file_path.open(\"rb\") as f:\n            async with session_scope() as db:\n                if not self.user_id:\n                    msg = \"User ID is required for file saving.\"\n                    raise ValueError(msg)\n                current_user = await get_user_by_id(db, self.user_id)\n\n                await upload_user_file(\n                    file=UploadFile(filename=file_path.name, file=f, size=file_path.stat().st_size),\n                    session=db,\n                    current_user=current_user,\n                    storage_service=get_storage_service(),\n                    settings_service=get_settings_service(),\n                    append=False,\n                )\n\n    def _save_dataframe(self, dataframe: DataFrame, path: Path, fmt: str) -> str:\n        \"\"\"Save a DataFrame to the specified file format.\"\"\"\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"csv\":\n            dataframe.to_csv(path, index=False, mode=\"a\" if should_append else \"w\", header=not should_append)\n        elif fmt == \"excel\":\n            dataframe.to_excel(path, index=False, engine=\"openpyxl\")\n        elif fmt == \"json\":\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new data\n                new_records = json.loads(dataframe.to_json(orient=\"records\"))\n                existing_data.extend(new_records)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                dataframe.to_json(path, orient=\"records\", indent=2)\n        elif fmt == \"markdown\":\n            content = dataframe.to_markdown(index=False)\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported DataFrame format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"DataFrame {action} '{path}'\"\n\n    def _save_data(self, data: Data, path: Path, fmt: str) -> str:\n        \"\"\"Save a Data object to the specified file format.\"\"\"\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"csv\":\n            pd.DataFrame(data.data).to_csv(\n                path,\n                index=False,\n                mode=\"a\" if should_append else \"w\",\n                header=not should_append,\n            )\n        elif fmt == \"excel\":\n            pd.DataFrame(data.data).to_excel(path, index=False, engine=\"openpyxl\")\n        elif fmt == \"json\":\n            new_data = jsonable_encoder(data.data)\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new data\n                if isinstance(new_data, list):\n                    existing_data.extend(new_data)\n                else:\n                    existing_data.append(new_data)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                content = orjson.dumps(new_data, option=orjson.OPT_INDENT_2).decode(\"utf-8\")\n                path.write_text(content, encoding=\"utf-8\")\n        elif fmt == \"markdown\":\n            content = pd.DataFrame(data.data).to_markdown(index=False)\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported Data format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"Data {action} '{path}'\"\n\n    async def _save_message(self, message: Message, path: Path, fmt: str) -> str:\n        \"\"\"Save a Message to the specified file format, handling async iterators.\"\"\"\n        content = \"\"\n        if message.text is None:\n            content = \"\"\n        elif isinstance(message.text, AsyncIterator):\n            async for item in message.text:\n                content += str(item) + \" \"\n            content = content.strip()\n        elif isinstance(message.text, Iterator):\n            content = \" \".join(str(item) for item in message.text)\n        else:\n            content = str(message.text)\n\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"txt\":\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        elif fmt == \"json\":\n            new_message = {\"message\": content}\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new message\n                existing_data.append(new_message)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                path.write_text(json.dumps(new_message, indent=2), encoding=\"utf-8\")\n        elif fmt == \"markdown\":\n            md_content = f\"**Message:**\\n\\n{content}\"\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + md_content, encoding=\"utf-8\")\n            else:\n                path.write_text(md_content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported Message format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"Message {action} '{path}'\"\n\n    def _get_selected_storage_location(self) -> str:\n        \"\"\"Get the selected storage location from the SortableListInput.\"\"\"\n        if hasattr(self, \"storage_location\") and self.storage_location:\n            if isinstance(self.storage_location, list) and len(self.storage_location) > 0:\n                return self.storage_location[0].get(\"name\", \"\")\n            if isinstance(self.storage_location, dict):\n                return self.storage_location.get(\"name\", \"\")\n        return \"\"\n\n    def _get_file_format_for_location(self, location: str) -> str:\n        \"\"\"Get the appropriate file format based on storage location.\"\"\"\n        if location == \"Local\":\n            return getattr(self, \"local_format\", None) or self._get_default_format()\n        if location == \"AWS\":\n            return getattr(self, \"aws_format\", \"txt\")\n        if location == \"Google Drive\":\n            return getattr(self, \"gdrive_format\", \"txt\")\n        return self._get_default_format()\n\n    async def _save_to_local(self) -> Message:\n        \"\"\"Save file to local storage (original functionality).\"\"\"\n        file_format = self._get_file_format_for_location(\"Local\")\n\n        # Validate file format based on input type\n        allowed_formats = (\n            self.LOCAL_MESSAGE_FORMAT_CHOICES if self._get_input_type() == \"Message\" else self.LOCAL_DATA_FORMAT_CHOICES\n        )\n        if file_format not in allowed_formats:\n            msg = f\"Invalid file format '{file_format}' for {self._get_input_type()}. Allowed: {allowed_formats}\"\n            raise ValueError(msg)\n\n        # Prepare file path\n        file_path = Path(self.file_name).expanduser()\n        if not file_path.parent.exists():\n            file_path.parent.mkdir(parents=True, exist_ok=True)\n        file_path = self._adjust_file_path_with_format(file_path, file_format)\n\n        # Save the input to file based on type\n        if self._get_input_type() == \"DataFrame\":\n            confirmation = self._save_dataframe(self.input, file_path, file_format)\n        elif self._get_input_type() == \"Data\":\n            confirmation = self._save_data(self.input, file_path, file_format)\n        elif self._get_input_type() == \"Message\":\n            confirmation = await self._save_message(self.input, file_path, file_format)\n        else:\n            msg = f\"Unsupported input type: {self._get_input_type()}\"\n            raise ValueError(msg)\n\n        # Upload the saved file\n        await self._upload_file(file_path)\n\n        # Return the final file path and confirmation message\n        final_path = Path.cwd() / file_path if not file_path.is_absolute() else file_path\n        return Message(text=f\"{confirmation} at {final_path}\")\n\n    async def _save_to_aws(self) -> Message:\n        \"\"\"Save file to AWS S3 using S3 functionality.\"\"\"\n        # Validate AWS credentials\n        if not getattr(self, \"aws_access_key_id\", None):\n            msg = \"AWS Access Key ID is required for S3 storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"aws_secret_access_key\", None):\n            msg = \"AWS Secret Key is required for S3 storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"bucket_name\", None):\n            msg = \"S3 Bucket Name is required for S3 storage\"\n            raise ValueError(msg)\n\n        # Use S3 upload functionality\n        try:\n            import boto3\n        except ImportError as e:\n            msg = \"boto3 is not installed. Please install it using `uv pip install boto3`.\"\n            raise ImportError(msg) from e\n\n        # Create S3 client\n        client_config = {\n            \"aws_access_key_id\": self.aws_access_key_id,\n            \"aws_secret_access_key\": self.aws_secret_access_key,\n        }\n\n        if hasattr(self, \"aws_region\") and self.aws_region:\n            client_config[\"region_name\"] = self.aws_region\n\n        s3_client = boto3.client(\"s3\", **client_config)\n\n        # Extract content\n        content = self._extract_content_for_upload()\n        file_format = self._get_file_format_for_location(\"AWS\")\n\n        # Generate file path\n        file_path = f\"{self.file_name}.{file_format}\"\n        if hasattr(self, \"s3_prefix\") and self.s3_prefix:\n            file_path = f\"{self.s3_prefix.rstrip('/')}/{file_path}\"\n\n        # Create temporary file\n        import tempfile\n\n        with tempfile.NamedTemporaryFile(\n            mode=\"w\", encoding=\"utf-8\", suffix=f\".{file_format}\", delete=False\n        ) as temp_file:\n            temp_file.write(content)\n            temp_file_path = temp_file.name\n\n        try:\n            # Upload to S3\n            s3_client.upload_file(temp_file_path, self.bucket_name, file_path)\n            s3_url = f\"s3://{self.bucket_name}/{file_path}\"\n            return Message(text=f\"File successfully uploaded to {s3_url}\")\n        finally:\n            # Clean up temp file\n            if Path(temp_file_path).exists():\n                Path(temp_file_path).unlink()\n\n    async def _save_to_google_drive(self) -> Message:\n        \"\"\"Save file to Google Drive using Google Drive functionality.\"\"\"\n        # Validate Google Drive credentials\n        if not getattr(self, \"service_account_key\", None):\n            msg = \"GCP Credentials Secret Key is required for Google Drive storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"folder_id\", None):\n            msg = \"Google Drive Folder ID is required for Google Drive storage\"\n            raise ValueError(msg)\n\n        # Use Google Drive upload functionality\n        try:\n            import json\n            import tempfile\n\n            from google.oauth2 import service_account\n            from googleapiclient.discovery import build\n            from googleapiclient.http import MediaFileUpload\n        except ImportError as e:\n            msg = \"Google API client libraries are not installed. Please install them.\"\n            raise ImportError(msg) from e\n\n        # Parse credentials with multiple fallback strategies\n        credentials_dict = None\n        parse_errors = []\n\n        # Strategy 1: Parse as-is with strict=False to allow control characters\n        try:\n            credentials_dict = json.loads(self.service_account_key, strict=False)\n        except json.JSONDecodeError as e:\n            parse_errors.append(f\"Standard parse: {e!s}\")\n\n        # Strategy 2: Strip whitespace and try again\n        if credentials_dict is None:\n            try:\n                cleaned_key = self.service_account_key.strip()\n                credentials_dict = json.loads(cleaned_key, strict=False)\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Stripped parse: {e!s}\")\n\n        # Strategy 3: Check if it's double-encoded (JSON string of a JSON string)\n        if credentials_dict is None:\n            try:\n                decoded_once = json.loads(self.service_account_key, strict=False)\n                if isinstance(decoded_once, str):\n                    credentials_dict = json.loads(decoded_once, strict=False)\n                else:\n                    credentials_dict = decoded_once\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Double-encoded parse: {e!s}\")\n\n        # Strategy 4: Try to fix common issues with newlines in the private_key field\n        if credentials_dict is None:\n            try:\n                # Replace literal \\n with actual newlines which is common in pasted JSON\n                fixed_key = self.service_account_key.replace(\"\\\\n\", \"\\n\")\n                credentials_dict = json.loads(fixed_key, strict=False)\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Newline-fixed parse: {e!s}\")\n\n        if credentials_dict is None:\n            error_details = \"; \".join(parse_errors)\n            msg = (\n                f\"Unable to parse service account key JSON. Tried multiple strategies: {error_details}. \"\n                \"Please ensure you've copied the entire JSON content from your service account key file. \"\n                \"The JSON should start with '{' and contain fields like 'type', 'project_id', 'private_key', etc.\"\n            )\n            raise ValueError(msg)\n\n        # Create Google Drive service with appropriate scopes\n        # Use drive scope for folder access, file scope is too restrictive for folder verification\n        credentials = service_account.Credentials.from_service_account_info(\n            credentials_dict, scopes=[\"https://www.googleapis.com/auth/drive\"]\n        )\n        drive_service = build(\"drive\", \"v3\", credentials=credentials)\n\n        # Extract content and format\n        content = self._extract_content_for_upload()\n        file_format = self._get_file_format_for_location(\"Google Drive\")\n\n        # Handle special Google Drive formats\n        if file_format in [\"slides\", \"docs\"]:\n            return await self._save_to_google_apps(drive_service, credentials, content, file_format)\n\n        # Create temporary file\n        file_path = f\"{self.file_name}.{file_format}\"\n        with tempfile.NamedTemporaryFile(\n            mode=\"w\",\n            encoding=\"utf-8\",\n            suffix=f\".{file_format}\",\n            delete=False,\n        ) as temp_file:\n            temp_file.write(content)\n            temp_file_path = temp_file.name\n\n        try:\n            # Upload to Google Drive\n            # Note: We skip explicit folder verification since it requires broader permissions.\n            # If the folder doesn't exist or isn't accessible, the create() call will fail with a clear error.\n            file_metadata = {\"name\": file_path, \"parents\": [self.folder_id]}\n            media = MediaFileUpload(temp_file_path, resumable=True)\n\n            try:\n                uploaded_file = (\n                    drive_service.files().create(body=file_metadata, media_body=media, fields=\"id\").execute()\n                )\n            except Exception as e:\n                msg = (\n                    f\"Unable to upload file to Google Drive folder '{self.folder_id}'. \"\n                    f\"Error: {e!s}. \"\n                    \"Please ensure: 1) The folder ID is correct, 2) The folder exists, \"\n                    \"3) The service account has been granted access to this folder.\"\n                )\n                raise ValueError(msg) from e\n\n            file_id = uploaded_file.get(\"id\")\n            file_url = f\"https://drive.google.com/file/d/{file_id}/view\"\n            return Message(text=f\"File successfully uploaded to Google Drive: {file_url}\")\n        finally:\n            # Clean up temp file\n            if Path(temp_file_path).exists():\n                Path(temp_file_path).unlink()\n\n    async def _save_to_google_apps(self, drive_service, credentials, content: str, app_type: str) -> Message:\n        \"\"\"Save content to Google Apps (Slides or Docs).\"\"\"\n        import time\n\n        if app_type == \"slides\":\n            from googleapiclient.discovery import build\n\n            slides_service = build(\"slides\", \"v1\", credentials=credentials)\n\n            file_metadata = {\n                \"name\": self.file_name,\n                \"mimeType\": \"application/vnd.google-apps.presentation\",\n                \"parents\": [self.folder_id],\n            }\n\n            created_file = drive_service.files().create(body=file_metadata, fields=\"id\").execute()\n            presentation_id = created_file[\"id\"]\n\n            time.sleep(2)  # Wait for file to be available  # noqa: ASYNC251\n\n            presentation = slides_service.presentations().get(presentationId=presentation_id).execute()\n            slide_id = presentation[\"slides\"][0][\"objectId\"]\n\n            # Add content to slide\n            requests = [\n                {\n                    \"createShape\": {\n                        \"objectId\": \"TextBox_01\",\n                        \"shapeType\": \"TEXT_BOX\",\n                        \"elementProperties\": {\n                            \"pageObjectId\": slide_id,\n                            \"size\": {\n                                \"height\": {\"magnitude\": 3000000, \"unit\": \"EMU\"},\n                                \"width\": {\"magnitude\": 6000000, \"unit\": \"EMU\"},\n                            },\n                            \"transform\": {\n                                \"scaleX\": 1,\n                                \"scaleY\": 1,\n                                \"translateX\": 1000000,\n                                \"translateY\": 1000000,\n                                \"unit\": \"EMU\",\n                            },\n                        },\n                    }\n                },\n                {\"insertText\": {\"objectId\": \"TextBox_01\", \"insertionIndex\": 0, \"text\": content}},\n            ]\n\n            slides_service.presentations().batchUpdate(\n                presentationId=presentation_id, body={\"requests\": requests}\n            ).execute()\n            file_url = f\"https://docs.google.com/presentation/d/{presentation_id}/edit\"\n\n        elif app_type == \"docs\":\n            from googleapiclient.discovery import build\n\n            docs_service = build(\"docs\", \"v1\", credentials=credentials)\n\n            file_metadata = {\n                \"name\": self.file_name,\n                \"mimeType\": \"application/vnd.google-apps.document\",\n                \"parents\": [self.folder_id],\n            }\n\n            created_file = drive_service.files().create(body=file_metadata, fields=\"id\").execute()\n            document_id = created_file[\"id\"]\n\n            time.sleep(2)  # Wait for file to be available  # noqa: ASYNC251\n\n            # Add content to document\n            requests = [{\"insertText\": {\"location\": {\"index\": 1}, \"text\": content}}]\n            docs_service.documents().batchUpdate(documentId=document_id, body={\"requests\": requests}).execute()\n            file_url = f\"https://docs.google.com/document/d/{document_id}/edit\"\n\n        return Message(text=f\"File successfully created in Google {app_type.title()}: {file_url}\")\n\n    def _extract_content_for_upload(self) -> str:\n        \"\"\"Extract content from input for upload to cloud services.\"\"\"\n        if self._get_input_type() == \"DataFrame\":\n            return self.input.to_csv(index=False)\n        if self._get_input_type() == \"Data\":\n            if hasattr(self.input, \"data\") and self.input.data:\n                if isinstance(self.input.data, dict):\n                    import json\n\n                    return json.dumps(self.input.data, indent=2, ensure_ascii=False)\n                return str(self.input.data)\n            return str(self.input)\n        if self._get_input_type() == \"Message\":\n            return str(self.input.text) if self.input.text else str(self.input)\n        return str(self.input)\n"
+"value": "import json\nfrom collections.abc import AsyncIterator, Iterator\nfrom pathlib import Path\n\nimport orjson\nimport pandas as pd\nfrom fastapi import UploadFile\nfrom fastapi.encoders import jsonable_encoder\n\nfrom lfx.custom import Component\nfrom lfx.inputs import SortableListInput\nfrom lfx.io import BoolInput, DropdownInput, HandleInput, SecretStrInput, StrInput\nfrom lfx.schema import Data, DataFrame, Message\nfrom lfx.services.deps import get_settings_service, get_storage_service, session_scope\nfrom lfx.template.field.base import Output\nfrom lfx.utils.validate_cloud import is_astra_cloud_environment\n\n\ndef _get_storage_location_options():\n    \"\"\"Get storage location options, filtering out Local if in Astra cloud environment.\"\"\"\n    all_options = [{\"name\": \"AWS\", \"icon\": \"Amazon\"}, {\"name\": \"Google Drive\", \"icon\": \"google\"}]\n    if is_astra_cloud_environment():\n        return all_options\n    return [{\"name\": \"Local\", \"icon\": \"hard-drive\"}, *all_options]\n\n\nclass SaveToFileComponent(Component):\n    display_name = \"Write File\"\n    description = \"Save data to local file, AWS S3, or Google Drive in the selected format.\"\n    documentation: str = \"https://docs.langflow.org/write-file\"\n    icon = \"file-text\"\n    name = \"SaveToFile\"\n\n    # File format options for different storage types\n    LOCAL_DATA_FORMAT_CHOICES = [\"csv\", \"excel\", \"json\", \"markdown\"]\n    LOCAL_MESSAGE_FORMAT_CHOICES = [\"txt\", \"json\", \"markdown\"]\n    AWS_FORMAT_CHOICES = [\n        \"txt\",\n        \"json\",\n        \"csv\",\n        \"xml\",\n        \"html\",\n        \"md\",\n        \"yaml\",\n        \"log\",\n        \"tsv\",\n        \"jsonl\",\n        \"parquet\",\n        \"xlsx\",\n        \"zip\",\n    ]\n    GDRIVE_FORMAT_CHOICES = [\"txt\", \"json\", \"csv\", \"xlsx\", \"slides\", \"docs\", \"jpg\", \"mp3\"]\n\n    inputs = [\n        # Storage location selection\n        SortableListInput(\n            name=\"storage_location\",\n            display_name=\"Storage Location\",\n            placeholder=\"Select Location\",\n            info=\"Choose where to save the file.\",\n            options=_get_storage_location_options(),\n            real_time_refresh=True,\n            limit=1,\n        ),\n        # Common inputs\n        HandleInput(\n            name=\"input\",\n            display_name=\"File Content\",\n            info=\"The input to save.\",\n            dynamic=True,\n            input_types=[\"Data\", \"DataFrame\", \"Message\"],\n            required=True,\n        ),\n        StrInput(\n            name=\"file_name\",\n            display_name=\"File Name\",\n            info=\"Name file will be saved as (without extension).\",\n            required=True,\n            show=False,\n            tool_mode=True,\n        ),\n        BoolInput(\n            name=\"append_mode\",\n            display_name=\"Append\",\n            info=(\n                \"Append to file if it exists (only for Local storage with plain text formats). \"\n            \"Not supported for cloud storage (AWS/Google Drive).\"\n            ),\n            value=False,\n            show=False,\n        ),\n        # Format inputs (dynamic based on storage location)\n        DropdownInput(\n            name=\"local_format\",\n            display_name=\"File Format\",\n            options=list(dict.fromkeys(LOCAL_DATA_FORMAT_CHOICES + LOCAL_MESSAGE_FORMAT_CHOICES)),\n            info=\"Select the file format for local storage.\",\n            value=\"json\",\n            show=False,\n        ),\n        DropdownInput(\n            name=\"aws_format\",\n            display_name=\"File Format\",\n            options=AWS_FORMAT_CHOICES,\n            info=\"Select the file format for AWS S3 storage.\",\n            value=\"txt\",\n            show=False,\n        ),\n        DropdownInput(\n            name=\"gdrive_format\",\n            display_name=\"File Format\",\n            options=GDRIVE_FORMAT_CHOICES,\n            info=\"Select the file format for Google Drive storage.\",\n            value=\"txt\",\n            show=False,\n        ),\n        # AWS S3 specific inputs\n        SecretStrInput(\n            name=\"aws_access_key_id\",\n            display_name=\"AWS Access Key ID\",\n            info=\"AWS Access key ID.\",\n            show=False,\n            advanced=True,\n        ),\n        SecretStrInput(\n            name=\"aws_secret_access_key\",\n            display_name=\"AWS Secret Key\",\n            info=\"AWS Secret Key.\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"bucket_name\",\n            display_name=\"S3 Bucket Name\",\n            info=\"Enter the name of the S3 bucket.\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"aws_region\",\n            display_name=\"AWS Region\",\n            info=\"AWS region (e.g., us-east-1, eu-west-1).\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"s3_prefix\",\n            display_name=\"S3 Prefix\",\n            info=\"Prefix for all files in S3.\",\n            show=False,\n            advanced=True,\n        ),\n        # Google Drive specific inputs\n        SecretStrInput(\n            name=\"service_account_key\",\n            display_name=\"GCP Credentials Secret Key\",\n            info=\"Your Google Cloud Platform service account JSON key as a secret string (complete JSON content).\",\n            show=False,\n            advanced=True,\n        ),\n        StrInput(\n            name=\"folder_id\",\n            display_name=\"Google Drive Folder ID\",\n            info=(\n                \"The Google Drive folder ID where the file will be uploaded. \"\n                \"The folder must be shared with the service account email.\"\n            ),\n            required=True,\n            show=False,\n            advanced=True,\n        ),\n    ]\n\n    outputs = [Output(display_name=\"File Path\", name=\"message\", method=\"save_to_file\")]\n\n    def update_build_config(self, build_config, field_value, field_name=None):\n        \"\"\"Update build configuration to show/hide fields based on storage location selection.\"\"\"\n        # Update options dynamically based on cloud environment\n        # This ensures options are refreshed when build_config is updated\n        if \"storage_location\" in build_config:\n            updated_options = _get_storage_location_options()\n            build_config[\"storage_location\"][\"options\"] = updated_options\n\n        if field_name != \"storage_location\":\n            return build_config\n\n        # Extract selected storage location\n        selected = [location[\"name\"] for location in field_value] if isinstance(field_value, list) else []\n\n        # Hide all dynamic fields first\n        dynamic_fields = [\n            \"file_name\",  # Common fields (input is always visible)\n            \"append_mode\",\n            \"local_format\",\n            \"aws_format\",\n            \"gdrive_format\",\n            \"aws_access_key_id\",\n            \"aws_secret_access_key\",\n            \"bucket_name\",\n            \"aws_region\",\n            \"s3_prefix\",\n            \"service_account_key\",\n            \"folder_id\",\n        ]\n\n        for f_name in dynamic_fields:\n            if f_name in build_config:\n                build_config[f_name][\"show\"] = False\n\n        # Show fields based on selected storage location\n        if len(selected) == 1:\n            location = selected[0]\n\n            # Show file_name when any storage location is selected\n            if \"file_name\" in build_config:\n                build_config[\"file_name\"][\"show\"] = True\n\n            # Show append_mode only for Local storage (not supported for cloud storage)\n            if \"append_mode\" in build_config:\n                build_config[\"append_mode\"][\"show\"] = location == \"Local\"\n\n            if location == \"Local\":\n                if \"local_format\" in build_config:\n                    build_config[\"local_format\"][\"show\"] = True\n\n            elif location == \"AWS\":\n                aws_fields = [\n                    \"aws_format\",\n                    \"aws_access_key_id\",\n                    \"aws_secret_access_key\",\n                    \"bucket_name\",\n                    \"aws_region\",\n                    \"s3_prefix\",\n                ]\n                for f_name in aws_fields:\n                    if f_name in build_config:\n                        build_config[f_name][\"show\"] = True\n\n            elif location == \"Google Drive\":\n                gdrive_fields = [\"gdrive_format\", \"service_account_key\", \"folder_id\"]\n                for f_name in gdrive_fields:\n                    if f_name in build_config:\n                        build_config[f_name][\"show\"] = True\n\n        return build_config\n\n    async def save_to_file(self) -> Message:\n        \"\"\"Save the input to a file and upload it, returning a confirmation message.\"\"\"\n        # Validate inputs\n        if not self.file_name:\n            msg = \"File name must be provided.\"\n            raise ValueError(msg)\n        if not self._get_input_type():\n            msg = \"Input type is not set.\"\n            raise ValueError(msg)\n\n        # Get selected storage location\n        storage_location = self._get_selected_storage_location()\n        if not storage_location:\n            msg = \"Storage location must be selected.\"\n            raise ValueError(msg)\n\n        # Check if Local storage is disabled in cloud environment\n        if storage_location == \"Local\" and is_astra_cloud_environment():\n            msg = \"Local storage is not available in cloud environment. Please use AWS or Google Drive.\"\n            raise ValueError(msg)\n\n        # Route to appropriate save method based on storage location\n        if storage_location == \"Local\":\n            return await self._save_to_local()\n        if storage_location == \"AWS\":\n            return await self._save_to_aws()\n        if storage_location == \"Google Drive\":\n            return await self._save_to_google_drive()\n        msg = f\"Unsupported storage location: {storage_location}\"\n        raise ValueError(msg)\n\n    def _get_input_type(self) -> str:\n        \"\"\"Determine the input type based on the provided input.\"\"\"\n        # Use exact type checking (type() is) instead of isinstance() to avoid inheritance issues.\n        # Since Message inherits from Data, isinstance(message, Data) would return True for Message objects,\n        # causing Message inputs to be incorrectly identified as Data type.\n        if type(self.input) is DataFrame:\n            return \"DataFrame\"\n        if type(self.input) is Message:\n            return \"Message\"\n        if type(self.input) is Data:\n            return \"Data\"\n        msg = f\"Unsupported input type: {type(self.input)}\"\n        raise ValueError(msg)\n\n    def _get_default_format(self) -> str:\n        \"\"\"Return the default file format based on input type.\"\"\"\n        if self._get_input_type() == \"DataFrame\":\n            return \"csv\"\n        if self._get_input_type() == \"Data\":\n            return \"json\"\n        if self._get_input_type() == \"Message\":\n            return \"json\"\n        return \"json\"  # Fallback\n\n    def _adjust_file_path_with_format(self, path: Path, fmt: str) -> Path:\n        \"\"\"Adjust the file path to include the correct extension.\"\"\"\n        file_extension = path.suffix.lower().lstrip(\".\")\n        if fmt == \"excel\":\n            return Path(f\"{path}.xlsx\").expanduser() if file_extension not in [\"xlsx\", \"xls\"] else path\n        return Path(f\"{path}.{fmt}\").expanduser() if file_extension != fmt else path\n\n    def _is_plain_text_format(self, fmt: str) -> bool:\n        \"\"\"Check if a file format is plain text (supports appending).\"\"\"\n        plain_text_formats = [\"txt\", \"json\", \"markdown\", \"md\", \"csv\", \"xml\", \"html\", \"yaml\", \"log\", \"tsv\", \"jsonl\"]\n        return fmt.lower() in plain_text_formats\n\n    async def _upload_file(self, file_path: Path) -> None:\n        \"\"\"Upload the saved file using the upload_user_file service.\"\"\"\n        from langflow.api.v2.files import upload_user_file\n        from langflow.services.database.models.user.crud import get_user_by_id\n\n        # Ensure the file exists\n        if not file_path.exists():\n            msg = f\"File not found: {file_path}\"\n            raise FileNotFoundError(msg)\n\n        # Upload the file - always use append=False because the local file already contains\n        # the correct content (either new or appended locally)\n        with file_path.open(\"rb\") as f:\n            async with session_scope() as db:\n                if not self.user_id:\n                    msg = \"User ID is required for file saving.\"\n                    raise ValueError(msg)\n                current_user = await get_user_by_id(db, self.user_id)\n\n                await upload_user_file(\n                    file=UploadFile(filename=file_path.name, file=f, size=file_path.stat().st_size),\n                    session=db,\n                    current_user=current_user,\n                    storage_service=get_storage_service(),\n                    settings_service=get_settings_service(),\n                    append=False,\n                )\n\n    def _save_dataframe(self, dataframe: DataFrame, path: Path, fmt: str) -> str:\n        \"\"\"Save a DataFrame to the specified file format.\"\"\"\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"csv\":\n            dataframe.to_csv(path, index=False, mode=\"a\" if should_append else \"w\", header=not should_append)\n        elif fmt == \"excel\":\n            dataframe.to_excel(path, index=False, engine=\"openpyxl\")\n        elif fmt == \"json\":\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new data\n                new_records = json.loads(dataframe.to_json(orient=\"records\"))\n                existing_data.extend(new_records)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                dataframe.to_json(path, orient=\"records\", indent=2)\n        elif fmt == \"markdown\":\n            content = dataframe.to_markdown(index=False)\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported DataFrame format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"DataFrame {action} '{path}'\"\n\n    def _save_data(self, data: Data, path: Path, fmt: str) -> str:\n        \"\"\"Save a Data object to the specified file format.\"\"\"\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"csv\":\n            pd.DataFrame(data.data).to_csv(\n                path,\n                index=False,\n                mode=\"a\" if should_append else \"w\",\n                header=not should_append,\n            )\n        elif fmt == \"excel\":\n            pd.DataFrame(data.data).to_excel(path, index=False, engine=\"openpyxl\")\n        elif fmt == \"json\":\n            new_data = jsonable_encoder(data.data)\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new data\n                if isinstance(new_data, list):\n                    existing_data.extend(new_data)\n                else:\n                    existing_data.append(new_data)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                content = orjson.dumps(new_data, option=orjson.OPT_INDENT_2).decode(\"utf-8\")\n                path.write_text(content, encoding=\"utf-8\")\n        elif fmt == \"markdown\":\n            content = pd.DataFrame(data.data).to_markdown(index=False)\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported Data format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"Data {action} '{path}'\"\n\n    async def _save_message(self, message: Message, path: Path, fmt: str) -> str:\n        \"\"\"Save a Message to the specified file format, handling async iterators.\"\"\"\n        content = \"\"\n        if message.text is None:\n            content = \"\"\n        elif isinstance(message.text, AsyncIterator):\n            async for item in message.text:\n            content += str(item) + \" \"\n            content = content.strip()\n        elif isinstance(message.text, Iterator):\n            content = \" \".join(str(item) for item in message.text)\n        else:\n            content = str(message.text)\n\n        append_mode = getattr(self, \"append_mode\", False)\n        should_append = append_mode and path.exists() and self._is_plain_text_format(fmt)\n\n        if fmt == \"txt\":\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\" + content, encoding=\"utf-8\")\n            else:\n                path.write_text(content, encoding=\"utf-8\")\n        elif fmt == \"json\":\n            new_message = {\"message\": content}\n            if should_append:\n                # Read and parse existing JSON\n                existing_data = []\n                try:\n                    existing_content = path.read_text(encoding=\"utf-8\").strip()\n                    if existing_content:\n                        parsed = json.loads(existing_content)\n                        # Handle case where existing content is a single object\n                        if isinstance(parsed, dict):\n                            existing_data = [parsed]\n                        elif isinstance(parsed, list):\n                            existing_data = parsed\n                except (json.JSONDecodeError, FileNotFoundError):\n                    # Treat parse errors or missing file as empty array\n                    existing_data = []\n\n                # Append new message\n                existing_data.append(new_message)\n\n                # Write back as a single JSON array\n                path.write_text(json.dumps(existing_data, indent=2), encoding=\"utf-8\")\n            else:\n                path.write_text(json.dumps(new_message, indent=2), encoding=\"utf-8\")\n        elif fmt == \"markdown\":\n            md_content = f\"**Message:**\\n\\n{content}\"\n            if should_append:\n                path.write_text(path.read_text(encoding=\"utf-8\") + \"\\n\\n\" + md_content, encoding=\"utf-8\")\n            else:\n                path.write_text(md_content, encoding=\"utf-8\")\n        else:\n            msg = f\"Unsupported Message format: {fmt}\"\n            raise ValueError(msg)\n        action = \"appended to\" if should_append else \"saved successfully as\"\n        return f\"Message {action} '{path}'\"\n\n    def _get_selected_storage_location(self) -> str:\n        \"\"\"Get the selected storage location from the SortableListInput.\"\"\"\n        if hasattr(self, \"storage_location\") and self.storage_location:\n            if isinstance(self.storage_location, list) and len(self.storage_location) > 0:\n                return self.storage_location[0].get(\"name\", \"\")\n            if isinstance(self.storage_location, dict):\n                return self.storage_location.get(\"name\", \"\")\n        return \"\"\n\n    def _get_file_format_for_location(self, location: str) -> str:\n        \"\"\"Get the appropriate file format based on storage location.\"\"\"\n        if location == \"Local\":\n            return getattr(self, \"local_format\", None) or self._get_default_format()\n        if location == \"AWS\":\n            return getattr(self, \"aws_format\", \"txt\")\n        if location == \"Google Drive\":\n            return getattr(self, \"gdrive_format\", \"txt\")\n        return self._get_default_format()\n\n    async def _save_to_local(self) -> Message:\n        \"\"\"Save file to local storage (original functionality).\"\"\"\n        file_format = self._get_file_format_for_location(\"Local\")\n\n        # Validate file format based on input type\n        allowed_formats = (\n            self.LOCAL_MESSAGE_FORMAT_CHOICES if self._get_input_type() == \"Message\" else self.LOCAL_DATA_FORMAT_CHOICES\n        )\n        if file_format not in allowed_formats:\n            msg = f\"Invalid file format '{file_format}' for {self._get_input_type()}. Allowed: {allowed_formats}\"\n            raise ValueError(msg)\n\n        # Prepare file path\n        file_path = Path(self.file_name).expanduser()\n        if not file_path.parent.exists():\n            file_path.parent.mkdir(parents=True, exist_ok=True)\n        file_path = self._adjust_file_path_with_format(file_path, file_format)\n\n        # Save the input to file based on type\n        if self._get_input_type() == \"DataFrame\":\n            confirmation = self._save_dataframe(self.input, file_path, file_format)\n        elif self._get_input_type() == \"Data\":\n            confirmation = self._save_data(self.input, file_path, file_format)\n        elif self._get_input_type() == \"Message\":\n            confirmation = await self._save_message(self.input, file_path, file_format)\n        else:\n            msg = f\"Unsupported input type: {self._get_input_type()}\"\n            raise ValueError(msg)\n\n        # Upload the saved file\n        await self._upload_file(file_path)\n\n        # Return the final file path and confirmation message\n        final_path = Path.cwd() / file_path if not file_path.is_absolute() else file_path\n        return Message(text=f\"{confirmation} at {final_path}\")\n\n    async def _save_to_aws(self) -> Message:\n        \"\"\"Save file to AWS S3 using S3 functionality.\"\"\"\n        # Validate AWS credentials\n        if not getattr(self, \"aws_access_key_id\", None):\n            msg = \"AWS Access Key ID is required for S3 storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"aws_secret_access_key\", None):\n            msg = \"AWS Secret Key is required for S3 storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"bucket_name\", None):\n            msg = \"S3 Bucket Name is required for S3 storage\"\n            raise ValueError(msg)\n\n        # Use S3 upload functionality\n        try:\n            import boto3\n        except ImportError as e:\n            msg = \"boto3 is not installed. Please install it using `uv pip install boto3`.\"\n            raise ImportError(msg) from e\n\n        # Create S3 client\n        client_config = {\n            \"aws_access_key_id\": self.aws_access_key_id,\n            \"aws_secret_access_key\": self.aws_secret_access_key,\n        }\n\n        if hasattr(self, \"aws_region\") and self.aws_region:\n            client_config[\"region_name\"] = self.aws_region\n\n        s3_client = boto3.client(\"s3\", **client_config)\n\n        # Extract content\n        content = self._extract_content_for_upload()\n        file_format = self._get_file_format_for_location(\"AWS\")\n\n        # Generate file path\n        file_path = f\"{self.file_name}.{file_format}\"\n        if hasattr(self, \"s3_prefix\") and self.s3_prefix:\n            file_path = f\"{self.s3_prefix.rstrip('/')}/{file_path}\"\n\n        # Create temporary file\n        import tempfile\n\n        with tempfile.NamedTemporaryFile(\n            mode=\"w\", encoding=\"utf-8\", suffix=f\".{file_format}\", delete=False\n        ) as temp_file:\n            temp_file.write(content)\n            temp_file_path = temp_file.name\n\n        try:\n            # Upload to S3\n            s3_client.upload_file(temp_file_path, self.bucket_name, file_path)\n            s3_url = f\"s3://{self.bucket_name}/{file_path}\"\n            return Message(text=f\"File successfully uploaded to {s3_url}\")\n        finally:\n            # Clean up temp file\n            if Path(temp_file_path).exists():\n                Path(temp_file_path).unlink()\n\n    async def _save_to_google_drive(self) -> Message:\n        \"\"\"Save file to Google Drive using Google Drive functionality.\"\"\"\n        # Validate Google Drive credentials\n        if not getattr(self, \"service_account_key\", None):\n            msg = \"GCP Credentials Secret Key is required for Google Drive storage\"\n            raise ValueError(msg)\n        if not getattr(self, \"folder_id\", None):\n            msg = \"Google Drive Folder ID is required for Google Drive storage\"\n            raise ValueError(msg)\n\n        # Use Google Drive upload functionality\n        try:\n            import json\n            import tempfile\n\n            from google.oauth2 import service_account\n            from googleapiclient.discovery import build\n            from googleapiclient.http import MediaFileUpload\n        except ImportError as e:\n            msg = \"Google API client libraries are not installed. Please install them.\"\n            raise ImportError(msg) from e\n\n        # Parse credentials with multiple fallback strategies\n        credentials_dict = None\n        parse_errors = []\n\n        # Strategy 1: Parse as-is with strict=False to allow control characters\n        try:\n            credentials_dict = json.loads(self.service_account_key, strict=False)\n        except json.JSONDecodeError as e:\n            parse_errors.append(f\"Standard parse: {e!s}\")\n\n        # Strategy 2: Strip whitespace and try again\n        if credentials_dict is None:\n            try:\n                cleaned_key = self.service_account_key.strip()\n                credentials_dict = json.loads(cleaned_key, strict=False)\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Stripped parse: {e!s}\")\n\n        # Strategy 3: Check if it's double-encoded (JSON string of a JSON string)\n        if credentials_dict is None:\n            try:\n                decoded_once = json.loads(self.service_account_key, strict=False)\n                if isinstance(decoded_once, str):\n                    credentials_dict = json.loads(decoded_once, strict=False)\n                else:\n                    credentials_dict = decoded_once\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Double-encoded parse: {e!s}\")\n\n        # Strategy 4: Try to fix common issues with newlines in the private_key field\n        if credentials_dict is None:\n            try:\n                # Replace literal \\n with actual newlines which is common in pasted JSON\n                fixed_key = self.service_account_key.replace(\"\\\\n\", \"\\n\")\n                credentials_dict = json.loads(fixed_key, strict=False)\n            except json.JSONDecodeError as e:\n                parse_errors.append(f\"Newline-fixed parse: {e!s}\")\n\n        if credentials_dict is None:\n            error_details = \"; \".join(parse_errors)\n            msg = (\n                f\"Unable to parse service account key JSON. Tried multiple strategies: {error_details}. \"\n                \"Please ensure you've copied the entire JSON content from your service account key file. \"\n                \"The JSON should start with '{' and contain fields like 'type', 'project_id', 'private_key', etc.\"\n            )\n            raise ValueError(msg)\n\n        # Create Google Drive service with appropriate scopes\n        # Use drive scope for folder access, file scope is too restrictive for folder verification\n        credentials = service_account.Credentials.from_service_account_info(\n            credentials_dict, scopes=[\"https://www.googleapis.com/auth/drive\"]\n        )\n        drive_service = build(\"drive\", \"v3\", credentials=credentials)\n\n        # Extract content and format\n        content = self._extract_content_for_upload()\n        file_format = self._get_file_format_for_location(\"Google Drive\")\n\n        # Handle special Google Drive formats\n        if file_format in [\"slides\", \"docs\"]:\n            return await self._save_to_google_apps(drive_service, credentials, content, file_format)\n\n        # Create temporary file\n        file_path = f\"{self.file_name}.{file_format}\"\n        with tempfile.NamedTemporaryFile(\n            mode=\"w\",\n            encoding=\"utf-8\",\n            suffix=f\".{file_format}\",\n            delete=False,\n        ) as temp_file:\n            temp_file.write(content)\n            temp_file_path = temp_file.name\n\n        try:\n            # Upload to Google Drive\n            # Note: We skip explicit folder verification since it requires broader permissions.\n            # If the folder doesn't exist or isn't accessible, the create() call will fail with a clear error.\n            file_metadata = {\"name\": file_path, \"parents\": [self.folder_id]}\n            media = MediaFileUpload(temp_file_path, resumable=True)\n\n            try:\n                uploaded_file = (\n                    drive_service.files().create(body=file_metadata, media_body=media, fields=\"id\").execute()\n                )\n            except Exception as e:\n                msg = (\n                    f\"Unable to upload file to Google Drive folder '{self.folder_id}'. \"\n                    f\"Error: {e!s}. \"\n                    \"Please ensure: 1) The folder ID is correct, 2) The folder exists, \"\n                    \"3) The service account has been granted access to this folder.\"\n                )\n                raise ValueError(msg) from e\n\n            file_id = uploaded_file.get(\"id\")\n            file_url = f\"https://drive.google.com/file/d/{file_id}/view\"\n            return Message(text=f\"File successfully uploaded to Google Drive: {file_url}\")\n        finally:\n            # Clean up temp file\n            if Path(temp_file_path).exists():\n                Path(temp_file_path).unlink()\n\n    async def _save_to_google_apps(self, drive_service, credentials, content: str, app_type: str) -> Message:\n        \"\"\"Save content to Google Apps (Slides or Docs).\"\"\"\n        import time\n\n        if app_type == \"slides\":\n            from googleapiclient.discovery import build\n\n            slides_service = build(\"slides\", \"v1\", credentials=credentials)\n\n            file_metadata = {\n                \"name\": self.file_name,\n                \"mimeType\": \"application/vnd.google-apps.presentation\",\n                \"parents\": [self.folder_id],\n            }\n\n            created_file = drive_service.files().create(body=file_metadata, fields=\"id\").execute()\n            presentation_id = created_file[\"id\"]\n\n            time.sleep(2)  # Wait for file to be available  # noqa: ASYNC251\n\n            presentation = slides_service.presentations().get(presentationId=presentation_id).execute()\n            slide_id = presentation[\"slides\"][0][\"objectId\"]\n\n            # Add content to slide\n            requests = [\n                {\n                    \"createShape\": {\n                        \"objectId\": \"TextBox_01\",\n                        \"shapeType\": \"TEXT_BOX\",\n                        \"elementProperties\": {\n                            \"pageObjectId\": slide_id,\n                            \"size\": {\n                                \"height\": {\"magnitude\": 3000000, \"unit\": \"EMU\"},\n                                \"width\": {\"magnitude\": 6000000, \"unit\": \"EMU\"},\n                            },\n                            \"transform\": {\n                                \"scaleX\": 1,\n                                \"scaleY\": 1,\n                                \"translateX\": 1000000,\n                                \"translateY\": 1000000,\n                                \"unit\": \"EMU\",\n                            },\n                        },\n                    }\n                },\n                {\"insertText\": {\"objectId\": \"TextBox_01\", \"insertionIndex\": 0, \"text\": content}},\n            ]\n\n            slides_service.presentations().batchUpdate(\n                presentationId=presentation_id, body={\"requests\": requests}\n            ).execute()\n            file_url = f\"https://docs.google.com/presentation/d/{presentation_id}/edit\"\n\n        elif app_type == \"docs\":\n            from googleapiclient.discovery import build\n\n            docs_service = build(\"docs\", \"v1\", credentials=credentials)\n\n            file_metadata = {\n                \"name\": self.file_name,\n                \"mimeType\": \"application/vnd.google-apps.document\",\n                \"parents\": [self.folder_id],\n            }\n\n            created_file = drive_service.files().create(body=file_metadata, fields=\"id\").execute()\n            document_id = created_file[\"id\"]\n\n            time.sleep(2)  # Wait for file to be available  # noqa: ASYNC251\n\n            # Add content to document\n            requests = [{\"insertText\": {\"location\": {\"index\": 1}, \"text\": content}}]\n            docs_service.documents().batchUpdate(documentId=document_id, body={\"requests\": requests}).execute()\n            file_url = f\"https://docs.google.com/document/d/{document_id}/edit\"\n\n        return Message(text=f\"File successfully created in Google {app_type.title()}: {file_url}\")\n\n    def _extract_content_for_upload(self) -> str:\n        \"\"\"Extract content from input for upload to cloud services.\"\"\"\n        if self._get_input_type() == \"DataFrame\":\n            return self.input.to_csv(index=False)\n        if self._get_input_type() == \"Data\":\n            if hasattr(self.input, \"data\") and self.input.data:\n                if isinstance(self.input.data, dict):\n                    import json\n\n                    return json.dumps(self.input.data, indent=2, ensure_ascii=False)\n                return str(self.input.data)\n            return str(self.input)\n        if self._get_input_type() == \"Message\":\n            return str(self.input.text) if self.input.text else str(self.input)\n        return str(self.input)\n"
 },
 "file_name": {
 "_input_type": "StrInput",
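The behavioral change in the component code above is that the Storage Location options are now built by a module-level helper, _get_storage_location_options(), which omits the "Local" option when is_astra_cloud_environment() reports a managed Astra deployment, and save_to_file() additionally rejects "Local" outright in that environment. Below is a minimal runnable sketch of that gating pattern; the real check lives in lfx.utils.validate_cloud, and the ASTRA_CLOUD environment-variable stub here is purely an assumption for illustration.

import os

def is_astra_cloud_environment() -> bool:
    # Hypothetical stand-in for lfx.utils.validate_cloud.is_astra_cloud_environment:
    # treat any non-empty ASTRA_CLOUD value as a managed cloud deployment.
    return bool(os.environ.get("ASTRA_CLOUD"))

def _get_storage_location_options():
    """Return storage options, omitting Local when running in Astra cloud."""
    all_options = [{"name": "AWS", "icon": "Amazon"}, {"name": "Google Drive", "icon": "google"}]
    if is_astra_cloud_environment():
        return all_options
    return [{"name": "Local", "icon": "hard-drive"}, *all_options]

print([o["name"] for o in _get_storage_location_options()])
# -> ['Local', 'AWS', 'Google Drive'] locally; 'Local' is dropped when ASTRA_CLOUD is set.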
{langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langflow-base-nightly
-Version: 1.7.0.dev48
+Version: 1.7.0.dev50
 Summary: A Python package with a built-in web application
 Project-URL: Repository, https://github.com/langflow-ai/langflow
 Project-URL: Documentation, https://docs.langflow.org
@@ -46,7 +46,7 @@ Requires-Dist: langchain-experimental<1.0.0,>=0.3.0
 Requires-Dist: langchain-ibm<1.0.0,>=0.3.8
 Requires-Dist: langchainhub~=0.1.15
 Requires-Dist: langchain~=0.3.21
-Requires-Dist: lfx-nightly==0.2.0.dev48
+Requires-Dist: lfx-nightly==0.2.0.dev50
 Requires-Dist: loguru<1.0.0,>=0.7.1
 Requires-Dist: mcp<2.0.0,>=1.17.0
 Requires-Dist: multiprocess<1.0.0,>=0.70.14
{langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/RECORD
RENAMED
@@ -2046,7 +2046,7 @@ langflow/initial_setup/starter_projects/Knowledge Retrieval.json,sha256=NrI5ZXtj
 langflow/initial_setup/starter_projects/Market Research.json,sha256=hh2-uhcEBQ1Bk2L88SuOyG3jyZzwRVJAnPSdFhd1bcU,187153
 langflow/initial_setup/starter_projects/Meeting Summary.json,sha256=pPiCoSP8tqdm-YH5x1A29msjZyYlR3NAbyXE_6OhAEY,222208
 langflow/initial_setup/starter_projects/Memory Chatbot.json,sha256=vVojdjZZK_JO0u0pzF02KiiANJ39T9M8weUlAlNLsX8,98781
-langflow/initial_setup/starter_projects/News Aggregator.json,sha256=
+langflow/initial_setup/starter_projects/News Aggregator.json,sha256=T-ZOVkuRX1umtDUaFiULk8iPYCteisUJRnP54ALvf1c,171105
 langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=0KwfrN_yqE82qkqBFSh-qZ4-yQxcOau_Ak-tPeN5598,353697
 langflow/initial_setup/starter_projects/Pokédex Agent.json,sha256=ljLhzn6NMTVDIFFcSaNvW0otFw9SSApYRzVL5xD_aiE,133612
 langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json,sha256=gbZ85uAuczxfH4wXGMiXDp9kQW5Hme643xhPwfuzswA,194180

@@ -2272,7 +2272,7 @@ langflow/utils/util.py,sha256=bZqi9Fqj2mlp9tKUA-Q4ePpooxtbuVLjlAvdml4kcjs,1516
 langflow/utils/validate.py,sha256=BPqoIMvjl4wbMJTTWo1zMHP0kQCa2TfmDT9f-nPT9Ng,112
 langflow/utils/version.py,sha256=OjSj0smls9XnPd4-LpTH9AWyUO_NAn5mncqKkkXl_fw,2840
 langflow/utils/voice_utils.py,sha256=Ypxg8s5jFd1o5wBbx1W8oKK7vh4kwo0-iuTcFqIwy5I,3350
-langflow_base_nightly-1.7.0.
-langflow_base_nightly-1.7.0.
-langflow_base_nightly-1.7.0.
-langflow_base_nightly-1.7.0.
+langflow_base_nightly-1.7.0.dev50.dist-info/METADATA,sha256=3XOQ5wHvOXLOeCtNmb6LXEG5wjxrgdIUsnnuOaFt0og,4461
+langflow_base_nightly-1.7.0.dev50.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+langflow_base_nightly-1.7.0.dev50.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
+langflow_base_nightly-1.7.0.dev50.dist-info/RECORD,,
{langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/WHEEL
RENAMED
File without changes

{langflow_base_nightly-1.7.0.dev48.dist-info → langflow_base_nightly-1.7.0.dev50.dist-info}/entry_points.txt
RENAMED
File without changes