lfx-nightly 0.1.12.dev26__py3-none-any.whl → 0.1.12.dev28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,625 @@
1
+ import json
2
+ from collections.abc import AsyncIterator, Iterator
3
+ from pathlib import Path
4
+
5
+ import orjson
6
+ import pandas as pd
7
+ from fastapi import UploadFile
8
+ from fastapi.encoders import jsonable_encoder
9
+
10
+ from lfx.custom import Component
11
+ from lfx.inputs import SortableListInput
12
+ from lfx.io import DropdownInput, HandleInput, SecretStrInput, StrInput
13
+ from lfx.schema import Data, DataFrame, Message
14
+ from lfx.services.deps import get_settings_service, get_storage_service, session_scope
15
+ from lfx.template.field.base import Output
16
+
17
+
18
class SaveToFileComponent(Component):
    """Component that writes Data/DataFrame/Message content to local disk, AWS S3, or Google Drive."""

    display_name = "Write File"
    description = "Save data to local file, AWS S3, or Google Drive in the selected format."
    documentation: str = "https://docs.langflow.org/components-processing#save-file"
    icon = "file-text"
    name = "SaveToFile"

    # Allowed file formats per storage backend (and, locally, per input type).
    LOCAL_DATA_FORMAT_CHOICES = ["csv", "excel", "json", "markdown"]
    LOCAL_MESSAGE_FORMAT_CHOICES = ["txt", "json", "markdown"]
    AWS_FORMAT_CHOICES = [
        "txt", "json", "csv", "xml", "html", "md", "yaml",
        "log", "tsv", "jsonl", "parquet", "xlsx", "zip",
    ]
    GDRIVE_FORMAT_CHOICES = ["txt", "json", "csv", "xlsx", "slides", "docs", "jpg", "mp3"]

    inputs = [
        # Which backend to write to; toggles the location-specific fields below.
        SortableListInput(
            name="storage_location",
            display_name="Storage Location",
            placeholder="Select Location",
            info="Choose where to save the file.",
            options=[
                {"name": "Local", "icon": "hard-drive"},
                {"name": "AWS", "icon": "Amazon"},
                {"name": "Google Drive", "icon": "google"},
            ],
            real_time_refresh=True,
            limit=1,
        ),
        # The payload to persist; accepts any of the three supported schema types.
        HandleInput(
            name="input",
            display_name="File Content",
            info="The input to save.",
            dynamic=True,
            input_types=["Data", "DataFrame", "Message"],
            required=True,
        ),
        StrInput(
            name="file_name",
            display_name="File Name",
            info="Name file will be saved as (without extension).",
            required=True,
            show=False,
        ),
        # Per-backend format dropdowns; only the relevant one is shown at a time.
        DropdownInput(
            name="local_format",
            display_name="File Format",
            options=list(dict.fromkeys(LOCAL_DATA_FORMAT_CHOICES + LOCAL_MESSAGE_FORMAT_CHOICES)),
            info="Select the file format for local storage.",
            value="json",
            show=False,
        ),
        DropdownInput(
            name="aws_format",
            display_name="File Format",
            options=AWS_FORMAT_CHOICES,
            info="Select the file format for AWS S3 storage.",
            value="txt",
            show=False,
        ),
        DropdownInput(
            name="gdrive_format",
            display_name="File Format",
            options=GDRIVE_FORMAT_CHOICES,
            info="Select the file format for Google Drive storage.",
            value="txt",
            show=False,
        ),
        # AWS S3 credentials and destination.
        SecretStrInput(
            name="aws_access_key_id",
            display_name="AWS Access Key ID",
            info="AWS Access key ID.",
            show=False,
            advanced=True,
        ),
        SecretStrInput(
            name="aws_secret_access_key",
            display_name="AWS Secret Key",
            info="AWS Secret Key.",
            show=False,
            advanced=True,
        ),
        StrInput(
            name="bucket_name",
            display_name="S3 Bucket Name",
            info="Enter the name of the S3 bucket.",
            show=False,
            advanced=True,
        ),
        StrInput(
            name="aws_region",
            display_name="AWS Region",
            info="AWS region (e.g., us-east-1, eu-west-1).",
            show=False,
            advanced=True,
        ),
        StrInput(
            name="s3_prefix",
            display_name="S3 Prefix",
            info="Prefix for all files in S3.",
            show=False,
            advanced=True,
        ),
        # Google Drive credentials and destination folder.
        SecretStrInput(
            name="service_account_key",
            display_name="GCP Credentials Secret Key",
            info="Your Google Cloud Platform service account JSON key as a secret string (complete JSON content).",
            show=False,
            advanced=True,
        ),
        StrInput(
            name="folder_id",
            display_name="Google Drive Folder ID",
            info=(
                "The Google Drive folder ID where the file will be uploaded. "
                "The folder must be shared with the service account email."
            ),
            show=False,
            advanced=True,
        ),
    ]

    outputs = [Output(display_name="File Path", name="message", method="save_to_file")]
158
+
159
+ def update_build_config(self, build_config, field_value, field_name=None):
160
+ """Update build configuration to show/hide fields based on storage location selection."""
161
+ if field_name != "storage_location":
162
+ return build_config
163
+
164
+ # Extract selected storage location
165
+ selected = [location["name"] for location in field_value] if isinstance(field_value, list) else []
166
+
167
+ # Hide all dynamic fields first
168
+ dynamic_fields = [
169
+ "file_name", # Common fields (input is always visible)
170
+ "local_format",
171
+ "aws_format",
172
+ "gdrive_format",
173
+ "aws_access_key_id",
174
+ "aws_secret_access_key",
175
+ "bucket_name",
176
+ "aws_region",
177
+ "s3_prefix",
178
+ "service_account_key",
179
+ "folder_id",
180
+ ]
181
+
182
+ for f_name in dynamic_fields:
183
+ if f_name in build_config:
184
+ build_config[f_name]["show"] = False
185
+
186
+ # Show fields based on selected storage location
187
+ if len(selected) == 1:
188
+ location = selected[0]
189
+
190
+ # Show file_name when any storage location is selected (input is always visible)
191
+ if "file_name" in build_config:
192
+ build_config["file_name"]["show"] = True
193
+
194
+ if location == "Local":
195
+ if "local_format" in build_config:
196
+ build_config["local_format"]["show"] = True
197
+
198
+ elif location == "AWS":
199
+ aws_fields = [
200
+ "aws_format",
201
+ "aws_access_key_id",
202
+ "aws_secret_access_key",
203
+ "bucket_name",
204
+ "aws_region",
205
+ "s3_prefix",
206
+ ]
207
+ for f_name in aws_fields:
208
+ if f_name in build_config:
209
+ build_config[f_name]["show"] = True
210
+
211
+ elif location == "Google Drive":
212
+ gdrive_fields = ["gdrive_format", "service_account_key", "folder_id"]
213
+ for f_name in gdrive_fields:
214
+ if f_name in build_config:
215
+ build_config[f_name]["show"] = True
216
+
217
+ return build_config
218
+
219
+ async def save_to_file(self) -> Message:
220
+ """Save the input to a file and upload it, returning a confirmation message."""
221
+ # Validate inputs
222
+ if not self.file_name:
223
+ msg = "File name must be provided."
224
+ raise ValueError(msg)
225
+ if not self._get_input_type():
226
+ msg = "Input type is not set."
227
+ raise ValueError(msg)
228
+
229
+ # Get selected storage location
230
+ storage_location = self._get_selected_storage_location()
231
+ if not storage_location:
232
+ msg = "Storage location must be selected."
233
+ raise ValueError(msg)
234
+
235
+ # Route to appropriate save method based on storage location
236
+ if storage_location == "Local":
237
+ return await self._save_to_local()
238
+ if storage_location == "AWS":
239
+ return await self._save_to_aws()
240
+ if storage_location == "Google Drive":
241
+ return await self._save_to_google_drive()
242
+ msg = f"Unsupported storage location: {storage_location}"
243
+ raise ValueError(msg)
244
+
245
+ def _get_input_type(self) -> str:
246
+ """Determine the input type based on the provided input."""
247
+ # Use exact type checking (type() is) instead of isinstance() to avoid inheritance issues.
248
+ # Since Message inherits from Data, isinstance(message, Data) would return True for Message objects,
249
+ # causing Message inputs to be incorrectly identified as Data type.
250
+ if type(self.input) is DataFrame:
251
+ return "DataFrame"
252
+ if type(self.input) is Message:
253
+ return "Message"
254
+ if type(self.input) is Data:
255
+ return "Data"
256
+ msg = f"Unsupported input type: {type(self.input)}"
257
+ raise ValueError(msg)
258
+
259
+ def _get_default_format(self) -> str:
260
+ """Return the default file format based on input type."""
261
+ if self._get_input_type() == "DataFrame":
262
+ return "csv"
263
+ if self._get_input_type() == "Data":
264
+ return "json"
265
+ if self._get_input_type() == "Message":
266
+ return "json"
267
+ return "json" # Fallback
268
+
269
+ def _adjust_file_path_with_format(self, path: Path, fmt: str) -> Path:
270
+ """Adjust the file path to include the correct extension."""
271
+ file_extension = path.suffix.lower().lstrip(".")
272
+ if fmt == "excel":
273
+ return Path(f"{path}.xlsx").expanduser() if file_extension not in ["xlsx", "xls"] else path
274
+ return Path(f"{path}.{fmt}").expanduser() if file_extension != fmt else path
275
+
276
+ async def _upload_file(self, file_path: Path) -> None:
277
+ """Upload the saved file using the upload_user_file service."""
278
+ from langflow.api.v2.files import upload_user_file
279
+ from langflow.services.database.models.user.crud import get_user_by_id
280
+
281
+ # Ensure the file exists
282
+ if not file_path.exists():
283
+ msg = f"File not found: {file_path}"
284
+ raise FileNotFoundError(msg)
285
+
286
+ # Upload the file
287
+ with file_path.open("rb") as f:
288
+ async with session_scope() as db:
289
+ if not self.user_id:
290
+ msg = "User ID is required for file saving."
291
+ raise ValueError(msg)
292
+ current_user = await get_user_by_id(db, self.user_id)
293
+
294
+ await upload_user_file(
295
+ file=UploadFile(filename=file_path.name, file=f, size=file_path.stat().st_size),
296
+ session=db,
297
+ current_user=current_user,
298
+ storage_service=get_storage_service(),
299
+ settings_service=get_settings_service(),
300
+ )
301
+
302
+ def _save_dataframe(self, dataframe: DataFrame, path: Path, fmt: str) -> str:
303
+ """Save a DataFrame to the specified file format."""
304
+ if fmt == "csv":
305
+ dataframe.to_csv(path, index=False)
306
+ elif fmt == "excel":
307
+ dataframe.to_excel(path, index=False, engine="openpyxl")
308
+ elif fmt == "json":
309
+ dataframe.to_json(path, orient="records", indent=2)
310
+ elif fmt == "markdown":
311
+ path.write_text(dataframe.to_markdown(index=False), encoding="utf-8")
312
+ else:
313
+ msg = f"Unsupported DataFrame format: {fmt}"
314
+ raise ValueError(msg)
315
+ return f"DataFrame saved successfully as '{path}'"
316
+
317
+ def _save_data(self, data: Data, path: Path, fmt: str) -> str:
318
+ """Save a Data object to the specified file format."""
319
+ if fmt == "csv":
320
+ pd.DataFrame(data.data).to_csv(path, index=False)
321
+ elif fmt == "excel":
322
+ pd.DataFrame(data.data).to_excel(path, index=False, engine="openpyxl")
323
+ elif fmt == "json":
324
+ path.write_text(
325
+ orjson.dumps(jsonable_encoder(data.data), option=orjson.OPT_INDENT_2).decode("utf-8"), encoding="utf-8"
326
+ )
327
+ elif fmt == "markdown":
328
+ path.write_text(pd.DataFrame(data.data).to_markdown(index=False), encoding="utf-8")
329
+ else:
330
+ msg = f"Unsupported Data format: {fmt}"
331
+ raise ValueError(msg)
332
+ return f"Data saved successfully as '{path}'"
333
+
334
+ async def _save_message(self, message: Message, path: Path, fmt: str) -> str:
335
+ """Save a Message to the specified file format, handling async iterators."""
336
+ content = ""
337
+ if message.text is None:
338
+ content = ""
339
+ elif isinstance(message.text, AsyncIterator):
340
+ async for item in message.text:
341
+ content += str(item) + " "
342
+ content = content.strip()
343
+ elif isinstance(message.text, Iterator):
344
+ content = " ".join(str(item) for item in message.text)
345
+ else:
346
+ content = str(message.text)
347
+
348
+ if fmt == "txt":
349
+ path.write_text(content, encoding="utf-8")
350
+ elif fmt == "json":
351
+ path.write_text(json.dumps({"message": content}, indent=2), encoding="utf-8")
352
+ elif fmt == "markdown":
353
+ path.write_text(f"**Message:**\n\n{content}", encoding="utf-8")
354
+ else:
355
+ msg = f"Unsupported Message format: {fmt}"
356
+ raise ValueError(msg)
357
+ return f"Message saved successfully as '{path}'"
358
+
359
+ def _get_selected_storage_location(self) -> str:
360
+ """Get the selected storage location from the SortableListInput."""
361
+ if hasattr(self, "storage_location") and self.storage_location:
362
+ if isinstance(self.storage_location, list) and len(self.storage_location) > 0:
363
+ return self.storage_location[0].get("name", "")
364
+ if isinstance(self.storage_location, dict):
365
+ return self.storage_location.get("name", "")
366
+ return ""
367
+
368
+ def _get_file_format_for_location(self, location: str) -> str:
369
+ """Get the appropriate file format based on storage location."""
370
+ if location == "Local":
371
+ return getattr(self, "local_format", None) or self._get_default_format()
372
+ if location == "AWS":
373
+ return getattr(self, "aws_format", "txt")
374
+ if location == "Google Drive":
375
+ return getattr(self, "gdrive_format", "txt")
376
+ return self._get_default_format()
377
+
378
+ async def _save_to_local(self) -> Message:
379
+ """Save file to local storage (original functionality)."""
380
+ file_format = self._get_file_format_for_location("Local")
381
+
382
+ # Validate file format based on input type
383
+ allowed_formats = (
384
+ self.LOCAL_MESSAGE_FORMAT_CHOICES if self._get_input_type() == "Message" else self.LOCAL_DATA_FORMAT_CHOICES
385
+ )
386
+ if file_format not in allowed_formats:
387
+ msg = f"Invalid file format '{file_format}' for {self._get_input_type()}. Allowed: {allowed_formats}"
388
+ raise ValueError(msg)
389
+
390
+ # Prepare file path
391
+ file_path = Path(self.file_name).expanduser()
392
+ if not file_path.parent.exists():
393
+ file_path.parent.mkdir(parents=True, exist_ok=True)
394
+ file_path = self._adjust_file_path_with_format(file_path, file_format)
395
+
396
+ # Save the input to file based on type
397
+ if self._get_input_type() == "DataFrame":
398
+ confirmation = self._save_dataframe(self.input, file_path, file_format)
399
+ elif self._get_input_type() == "Data":
400
+ confirmation = self._save_data(self.input, file_path, file_format)
401
+ elif self._get_input_type() == "Message":
402
+ confirmation = await self._save_message(self.input, file_path, file_format)
403
+ else:
404
+ msg = f"Unsupported input type: {self._get_input_type()}"
405
+ raise ValueError(msg)
406
+
407
+ # Upload the saved file
408
+ await self._upload_file(file_path)
409
+
410
+ # Return the final file path and confirmation message
411
+ final_path = Path.cwd() / file_path if not file_path.is_absolute() else file_path
412
+ return Message(text=f"{confirmation} at {final_path}")
413
+
414
+ async def _save_to_aws(self) -> Message:
415
+ """Save file to AWS S3 using S3 functionality."""
416
+ # Validate AWS credentials
417
+ if not getattr(self, "aws_access_key_id", None):
418
+ msg = "AWS Access Key ID is required for S3 storage"
419
+ raise ValueError(msg)
420
+ if not getattr(self, "aws_secret_access_key", None):
421
+ msg = "AWS Secret Key is required for S3 storage"
422
+ raise ValueError(msg)
423
+ if not getattr(self, "bucket_name", None):
424
+ msg = "S3 Bucket Name is required for S3 storage"
425
+ raise ValueError(msg)
426
+
427
+ # Use S3 upload functionality
428
+ try:
429
+ import boto3
430
+ except ImportError as e:
431
+ msg = "boto3 is not installed. Please install it using `uv pip install boto3`."
432
+ raise ImportError(msg) from e
433
+
434
+ # Create S3 client
435
+ client_config = {
436
+ "aws_access_key_id": self.aws_access_key_id,
437
+ "aws_secret_access_key": self.aws_secret_access_key,
438
+ }
439
+
440
+ if hasattr(self, "aws_region") and self.aws_region:
441
+ client_config["region_name"] = self.aws_region
442
+
443
+ s3_client = boto3.client("s3", **client_config)
444
+
445
+ # Extract content
446
+ content = self._extract_content_for_upload()
447
+ file_format = self._get_file_format_for_location("AWS")
448
+
449
+ # Generate file path
450
+ file_path = f"{self.file_name}.{file_format}"
451
+ if hasattr(self, "s3_prefix") and self.s3_prefix:
452
+ file_path = f"{self.s3_prefix.rstrip('/')}/{file_path}"
453
+
454
+ # Create temporary file
455
+ import tempfile
456
+
457
+ with tempfile.NamedTemporaryFile(mode="w", suffix=f".{file_format}", delete=False) as temp_file:
458
+ temp_file.write(content)
459
+ temp_file_path = temp_file.name
460
+
461
+ try:
462
+ # Upload to S3
463
+ s3_client.upload_file(temp_file_path, self.bucket_name, file_path)
464
+ s3_url = f"s3://{self.bucket_name}/{file_path}"
465
+ return Message(text=f"File successfully uploaded to {s3_url}")
466
+ finally:
467
+ # Clean up temp file
468
+ if Path(temp_file_path).exists():
469
+ Path(temp_file_path).unlink()
470
+
471
+ async def _save_to_google_drive(self) -> Message:
472
+ """Save file to Google Drive using Google Drive functionality."""
473
+ # Validate Google Drive credentials
474
+ if not getattr(self, "service_account_key", None):
475
+ msg = "GCP Credentials Secret Key is required for Google Drive storage"
476
+ raise ValueError(msg)
477
+ if not getattr(self, "folder_id", None):
478
+ msg = "Google Drive Folder ID is required for Google Drive storage"
479
+ raise ValueError(msg)
480
+
481
+ # Use Google Drive upload functionality
482
+ try:
483
+ import json
484
+ import tempfile
485
+
486
+ from google.oauth2 import service_account
487
+ from googleapiclient.discovery import build
488
+ from googleapiclient.http import MediaFileUpload
489
+ except ImportError as e:
490
+ msg = "Google API client libraries are not installed. Please install them."
491
+ raise ImportError(msg) from e
492
+
493
+ # Parse credentials
494
+ try:
495
+ credentials_dict = json.loads(self.service_account_key)
496
+ except json.JSONDecodeError as e:
497
+ msg = f"Invalid JSON in service account key: {e!s}"
498
+ raise ValueError(msg) from e
499
+
500
+ # Create Google Drive service
501
+ credentials = service_account.Credentials.from_service_account_info(
502
+ credentials_dict, scopes=["https://www.googleapis.com/auth/drive.file"]
503
+ )
504
+ drive_service = build("drive", "v3", credentials=credentials)
505
+
506
+ # Extract content and format
507
+ content = self._extract_content_for_upload()
508
+ file_format = self._get_file_format_for_location("Google Drive")
509
+
510
+ # Handle special Google Drive formats
511
+ if file_format in ["slides", "docs"]:
512
+ return await self._save_to_google_apps(drive_service, content, file_format)
513
+
514
+ # Create temporary file
515
+ file_path = f"{self.file_name}.{file_format}"
516
+ with tempfile.NamedTemporaryFile(mode="w", suffix=f".{file_format}", delete=False) as temp_file:
517
+ temp_file.write(content)
518
+ temp_file_path = temp_file.name
519
+
520
+ try:
521
+ # Upload to Google Drive
522
+ file_metadata = {"name": file_path, "parents": [self.folder_id]}
523
+ media = MediaFileUpload(temp_file_path, resumable=True)
524
+
525
+ uploaded_file = drive_service.files().create(body=file_metadata, media_body=media, fields="id").execute()
526
+
527
+ file_id = uploaded_file.get("id")
528
+ file_url = f"https://drive.google.com/file/d/{file_id}/view"
529
+ return Message(text=f"File successfully uploaded to Google Drive: {file_url}")
530
+ finally:
531
+ # Clean up temp file
532
+ if Path(temp_file_path).exists():
533
+ Path(temp_file_path).unlink()
534
+
535
+ async def _save_to_google_apps(self, drive_service, content: str, app_type: str) -> Message:
536
+ """Save content to Google Apps (Slides or Docs)."""
537
+ import time
538
+
539
+ if app_type == "slides":
540
+ from googleapiclient.discovery import build
541
+
542
+ slides_service = build("slides", "v1", credentials=drive_service._http.credentials)
543
+
544
+ file_metadata = {
545
+ "name": self.file_name,
546
+ "mimeType": "application/vnd.google-apps.presentation",
547
+ "parents": [self.folder_id],
548
+ }
549
+
550
+ created_file = drive_service.files().create(body=file_metadata, fields="id").execute()
551
+ presentation_id = created_file["id"]
552
+
553
+ time.sleep(2) # Wait for file to be available # noqa: ASYNC251
554
+
555
+ presentation = slides_service.presentations().get(presentationId=presentation_id).execute()
556
+ slide_id = presentation["slides"][0]["objectId"]
557
+
558
+ # Add content to slide
559
+ requests = [
560
+ {
561
+ "createShape": {
562
+ "objectId": "TextBox_01",
563
+ "shapeType": "TEXT_BOX",
564
+ "elementProperties": {
565
+ "pageObjectId": slide_id,
566
+ "size": {
567
+ "height": {"magnitude": 3000000, "unit": "EMU"},
568
+ "width": {"magnitude": 6000000, "unit": "EMU"},
569
+ },
570
+ "transform": {
571
+ "scaleX": 1,
572
+ "scaleY": 1,
573
+ "translateX": 1000000,
574
+ "translateY": 1000000,
575
+ "unit": "EMU",
576
+ },
577
+ },
578
+ }
579
+ },
580
+ {"insertText": {"objectId": "TextBox_01", "insertionIndex": 0, "text": content}},
581
+ ]
582
+
583
+ slides_service.presentations().batchUpdate(
584
+ presentationId=presentation_id, body={"requests": requests}
585
+ ).execute()
586
+ file_url = f"https://docs.google.com/presentation/d/{presentation_id}/edit"
587
+
588
+ elif app_type == "docs":
589
+ from googleapiclient.discovery import build
590
+
591
+ docs_service = build("docs", "v1", credentials=drive_service._http.credentials)
592
+
593
+ file_metadata = {
594
+ "name": self.file_name,
595
+ "mimeType": "application/vnd.google-apps.document",
596
+ "parents": [self.folder_id],
597
+ }
598
+
599
+ created_file = drive_service.files().create(body=file_metadata, fields="id").execute()
600
+ document_id = created_file["id"]
601
+
602
+ time.sleep(2) # Wait for file to be available # noqa: ASYNC251
603
+
604
+ # Add content to document
605
+ requests = [{"insertText": {"location": {"index": 1}, "text": content}}]
606
+ docs_service.documents().batchUpdate(documentId=document_id, body={"requests": requests}).execute()
607
+ file_url = f"https://docs.google.com/document/d/{document_id}/edit"
608
+
609
+ return Message(text=f"File successfully created in Google {app_type.title()}: {file_url}")
610
+
611
+ def _extract_content_for_upload(self) -> str:
612
+ """Extract content from input for upload to cloud services."""
613
+ if self._get_input_type() == "DataFrame":
614
+ return self.input.to_csv(index=False)
615
+ if self._get_input_type() == "Data":
616
+ if hasattr(self.input, "data") and self.input.data:
617
+ if isinstance(self.input.data, dict):
618
+ import json
619
+
620
+ return json.dumps(self.input.data, indent=2, ensure_ascii=False)
621
+ return str(self.input.data)
622
+ return str(self.input)
623
+ if self._get_input_type() == "Message":
624
+ return str(self.input.text) if self.input.text else str(self.input)
625
+ return str(self.input)