chainlit 2.7.0__py3-none-any.whl → 2.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chainlit might be problematic.

Files changed (85)
  1. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/METADATA +1 -1
  2. chainlit-2.7.1.dist-info/RECORD +4 -0
  3. chainlit/__init__.py +0 -207
  4. chainlit/__main__.py +0 -4
  5. chainlit/_utils.py +0 -8
  6. chainlit/action.py +0 -33
  7. chainlit/auth/__init__.py +0 -95
  8. chainlit/auth/cookie.py +0 -197
  9. chainlit/auth/jwt.py +0 -42
  10. chainlit/cache.py +0 -45
  11. chainlit/callbacks.py +0 -433
  12. chainlit/chat_context.py +0 -64
  13. chainlit/chat_settings.py +0 -34
  14. chainlit/cli/__init__.py +0 -235
  15. chainlit/config.py +0 -621
  16. chainlit/context.py +0 -112
  17. chainlit/data/__init__.py +0 -111
  18. chainlit/data/acl.py +0 -19
  19. chainlit/data/base.py +0 -107
  20. chainlit/data/chainlit_data_layer.py +0 -687
  21. chainlit/data/dynamodb.py +0 -616
  22. chainlit/data/literalai.py +0 -501
  23. chainlit/data/sql_alchemy.py +0 -741
  24. chainlit/data/storage_clients/__init__.py +0 -0
  25. chainlit/data/storage_clients/azure.py +0 -84
  26. chainlit/data/storage_clients/azure_blob.py +0 -94
  27. chainlit/data/storage_clients/base.py +0 -28
  28. chainlit/data/storage_clients/gcs.py +0 -101
  29. chainlit/data/storage_clients/s3.py +0 -88
  30. chainlit/data/utils.py +0 -29
  31. chainlit/discord/__init__.py +0 -6
  32. chainlit/discord/app.py +0 -364
  33. chainlit/element.py +0 -454
  34. chainlit/emitter.py +0 -450
  35. chainlit/hello.py +0 -12
  36. chainlit/input_widget.py +0 -182
  37. chainlit/langchain/__init__.py +0 -6
  38. chainlit/langchain/callbacks.py +0 -682
  39. chainlit/langflow/__init__.py +0 -25
  40. chainlit/llama_index/__init__.py +0 -6
  41. chainlit/llama_index/callbacks.py +0 -206
  42. chainlit/logger.py +0 -16
  43. chainlit/markdown.py +0 -57
  44. chainlit/mcp.py +0 -99
  45. chainlit/message.py +0 -619
  46. chainlit/mistralai/__init__.py +0 -50
  47. chainlit/oauth_providers.py +0 -835
  48. chainlit/openai/__init__.py +0 -53
  49. chainlit/py.typed +0 -0
  50. chainlit/secret.py +0 -9
  51. chainlit/semantic_kernel/__init__.py +0 -111
  52. chainlit/server.py +0 -1616
  53. chainlit/session.py +0 -304
  54. chainlit/sidebar.py +0 -55
  55. chainlit/slack/__init__.py +0 -6
  56. chainlit/slack/app.py +0 -427
  57. chainlit/socket.py +0 -381
  58. chainlit/step.py +0 -490
  59. chainlit/sync.py +0 -43
  60. chainlit/teams/__init__.py +0 -6
  61. chainlit/teams/app.py +0 -348
  62. chainlit/translations/bn.json +0 -214
  63. chainlit/translations/el-GR.json +0 -214
  64. chainlit/translations/en-US.json +0 -214
  65. chainlit/translations/fr-FR.json +0 -214
  66. chainlit/translations/gu.json +0 -214
  67. chainlit/translations/he-IL.json +0 -214
  68. chainlit/translations/hi.json +0 -214
  69. chainlit/translations/ja.json +0 -214
  70. chainlit/translations/kn.json +0 -214
  71. chainlit/translations/ml.json +0 -214
  72. chainlit/translations/mr.json +0 -214
  73. chainlit/translations/nl.json +0 -214
  74. chainlit/translations/ta.json +0 -214
  75. chainlit/translations/te.json +0 -214
  76. chainlit/translations/zh-CN.json +0 -214
  77. chainlit/translations.py +0 -60
  78. chainlit/types.py +0 -334
  79. chainlit/user.py +0 -43
  80. chainlit/user_session.py +0 -153
  81. chainlit/utils.py +0 -173
  82. chainlit/version.py +0 -8
  83. chainlit-2.7.0.dist-info/RECORD +0 -84
  84. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/WHEEL +0 -0
  85. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/entry_points.txt +0 -0
chainlit/data/storage_clients/azure.py DELETED
@@ -1,84 +0,0 @@
- from typing import TYPE_CHECKING, Any, Dict, Optional, Union
-
- from azure.storage.filedatalake import (
-     ContentSettings,
-     DataLakeFileClient,
-     DataLakeServiceClient,
-     FileSystemClient,
- )
-
- from chainlit.data.storage_clients.base import BaseStorageClient
- from chainlit.logger import logger
-
- if TYPE_CHECKING:
-     from azure.core.credentials import (
-         AzureNamedKeyCredential,
-         AzureSasCredential,
-         TokenCredential,
-     )
-
-
- class AzureStorageClient(BaseStorageClient):
-     """
-     Class to enable Azure Data Lake Storage (ADLS) Gen2
-
-     parms:
-         account_url: "https://<your_account>.dfs.core.windows.net"
-         credential: Access credential (AzureKeyCredential)
-         sas_token: Optionally include SAS token to append to urls
-     """
-
-     def __init__(
-         self,
-         account_url: str,
-         container: str,
-         credential: Optional[
-             Union[
-                 str,
-                 Dict[str, str],
-                 "AzureNamedKeyCredential",
-                 "AzureSasCredential",
-                 "TokenCredential",
-             ]
-         ],
-         sas_token: Optional[str] = None,
-     ):
-         try:
-             self.data_lake_client = DataLakeServiceClient(
-                 account_url=account_url, credential=credential
-             )
-             self.container_client: FileSystemClient = (
-                 self.data_lake_client.get_file_system_client(file_system=container)
-             )
-             self.sas_token = sas_token
-             logger.info("AzureStorageClient initialized")
-         except Exception as e:
-             logger.warning(f"AzureStorageClient initialization error: {e}")
-
-     async def upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         try:
-             file_client: DataLakeFileClient = self.container_client.get_file_client(
-                 object_key
-             )
-             content_settings = ContentSettings(
-                 content_type=mime, content_disposition=content_disposition
-             )
-             file_client.upload_data(
-                 data, overwrite=overwrite, content_settings=content_settings
-             )
-             url = (
-                 f"{file_client.url}{self.sas_token}"
-                 if self.sas_token
-                 else file_client.url
-             )
-             return {"object_key": object_key, "url": url}
-         except Exception as e:
-             logger.warning(f"AzureStorageClient, upload_file error: {e}")
-             return {}
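For context, a minimal usage sketch of the AzureStorageClient removed above (not part of the diff): it assumes the 2.7.0 import path shown in the hunk, and the account URL, container, credential, and object key are placeholders.

import asyncio

from chainlit.data.storage_clients.azure import AzureStorageClient  # 2.7.0 layout


async def main() -> None:
    # Placeholder account URL, container, and credential.
    client = AzureStorageClient(
        account_url="https://<your_account>.dfs.core.windows.net",
        container="my-container",
        credential="<account-key>",
    )
    # upload_file is declared async and returns {"object_key": ..., "url": ...}.
    result = await client.upload_file(
        object_key="threads/123/report.pdf",
        data=b"%PDF-1.4 ...",
        mime="application/pdf",
    )
    print(result.get("url"))


asyncio.run(main())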
chainlit/data/storage_clients/azure_blob.py DELETED
@@ -1,94 +0,0 @@
- from datetime import datetime, timedelta, timezone
- from typing import Any, Dict, Union
-
- from azure.storage.blob import BlobSasPermissions, ContentSettings, generate_blob_sas
- from azure.storage.blob.aio import BlobServiceClient as AsyncBlobServiceClient
-
- from chainlit.data.storage_clients.base import BaseStorageClient, storage_expiry_time
- from chainlit.logger import logger
-
-
- class AzureBlobStorageClient(BaseStorageClient):
-     def __init__(self, container_name: str, storage_account: str, storage_key: str):
-         self.container_name = container_name
-         self.storage_account = storage_account
-         self.storage_key = storage_key
-         connection_string = (
-             f"DefaultEndpointsProtocol=https;"
-             f"AccountName={storage_account};"
-             f"AccountKey={storage_key};"
-             f"EndpointSuffix=core.windows.net"
-         )
-         self.service_client = AsyncBlobServiceClient.from_connection_string(
-             connection_string
-         )
-         self.container_client = self.service_client.get_container_client(
-             self.container_name
-         )
-         logger.info("AzureBlobStorageClient initialized")
-
-     async def get_read_url(self, object_key: str) -> str:
-         if not self.storage_key:
-             raise Exception("Not using Azure Storage")
-
-         sas_permissions = BlobSasPermissions(read=True)
-         start_time = datetime.now(tz=timezone.utc)
-         expiry_time = start_time + timedelta(seconds=storage_expiry_time)
-
-         sas_token = generate_blob_sas(
-             account_name=self.storage_account,
-             container_name=self.container_name,
-             blob_name=object_key,
-             account_key=self.storage_key,
-             permission=sas_permissions,
-             start=start_time,
-             expiry=expiry_time,
-         )
-
-         return f"https://{self.storage_account}.blob.core.windows.net/{self.container_name}/{object_key}?{sas_token}"
-
-     async def upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         try:
-             blob_client = self.container_client.get_blob_client(object_key)
-
-             if isinstance(data, str):
-                 data = data.encode("utf-8")
-
-             content_settings = ContentSettings(
-                 content_type=mime, content_disposition=content_disposition
-             )
-
-             await blob_client.upload_blob(
-                 data, overwrite=overwrite, content_settings=content_settings
-             )
-
-             properties = await blob_client.get_blob_properties()
-
-             return {
-                 "path": object_key,
-                 "object_key": object_key,
-                 "url": await self.get_read_url(object_key),
-                 "size": properties.size,
-                 "last_modified": properties.last_modified,
-                 "etag": properties.etag,
-                 "content_type": properties.content_settings.content_type,
-             }
-
-         except Exception as e:
-             raise Exception(f"Failed to upload file to Azure Blob Storage: {e!s}")
-
-     async def delete_file(self, object_key: str) -> bool:
-         try:
-             blob_client = self.container_client.get_blob_client(blob=object_key)
-             await blob_client.delete_blob()
-             return True
-         except Exception as e:
-             logger.warning(f"AzureBlobStorageClient, delete_file error: {e}")
-             return False
chainlit/data/storage_clients/base.py DELETED
@@ -1,28 +0,0 @@
- import os
- from abc import ABC, abstractmethod
- from typing import Any, Dict, Union
-
- storage_expiry_time = int(os.getenv("STORAGE_EXPIRY_TIME", 3600))
-
-
- class BaseStorageClient(ABC):
-     """Base class for non-text data persistence like Azure Data Lake, S3, Google Storage, etc."""
-
-     @abstractmethod
-     async def upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         pass
-
-     @abstractmethod
-     async def delete_file(self, object_key: str) -> bool:
-         pass
-
-     @abstractmethod
-     async def get_read_url(self, object_key: str) -> str:
-         pass
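To illustrate the interface removed above, here is a hedged sketch of a hypothetical local-filesystem client that implements the three abstract methods; it is not part of chainlit, it assumes the 2.7.0 import path shown in the hunk, and the file:// URL scheme is purely for demonstration.

from pathlib import Path
from typing import Any, Dict, Union

from chainlit.data.storage_clients.base import BaseStorageClient  # 2.7.0 layout


class LocalStorageClient(BaseStorageClient):
    """Hypothetical client writing objects under a local directory (sketch only)."""

    def __init__(self, root: str):
        # Resolve to an absolute path so as_uri() works below.
        self.root = Path(root).resolve()

    async def upload_file(
        self,
        object_key: str,
        data: Union[bytes, str],
        mime: str = "application/octet-stream",
        overwrite: bool = True,
        content_disposition: str | None = None,
    ) -> Dict[str, Any]:
        path = self.root / object_key
        if path.exists() and not overwrite:
            return {}
        path.parent.mkdir(parents=True, exist_ok=True)
        payload = data.encode("utf-8") if isinstance(data, str) else data
        path.write_bytes(payload)
        return {"object_key": object_key, "url": path.as_uri()}

    async def delete_file(self, object_key: str) -> bool:
        path = self.root / object_key
        if path.exists():
            path.unlink()
            return True
        return False

    async def get_read_url(self, object_key: str) -> str:
        return (self.root / object_key).as_uri()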
chainlit/data/storage_clients/gcs.py DELETED
@@ -1,101 +0,0 @@
- from typing import Any, Dict, Optional, Union
-
- from google.auth import default
- from google.cloud import storage  # type: ignore
- from google.oauth2 import service_account
-
- from chainlit import make_async
- from chainlit.data.storage_clients.base import BaseStorageClient, storage_expiry_time
- from chainlit.logger import logger
-
-
- class GCSStorageClient(BaseStorageClient):
-     def __init__(
-         self,
-         bucket_name: str,
-         project_id: Optional[str] = None,
-         client_email: Optional[str] = None,
-         private_key: Optional[str] = None,
-     ):
-         if client_email and private_key and project_id:
-             # Go to IAM & Admin, click on Service Accounts, and generate a new JSON key
-             logger.info("Using Private Key from Environment Variable")
-             credentials = service_account.Credentials.from_service_account_info(
-                 {
-                     "type": "service_account",
-                     "project_id": project_id,
-                     "private_key": private_key,
-                     "client_email": client_email,
-                     "token_uri": "https://oauth2.googleapis.com/token",
-                 }
-             )
-         else:
-             # Application Default Credentials (e.g. in Google Cloud Run)
-             logger.info("Using Application Default Credentials.")
-             credentials, default_project_id = default()
-             if not project_id:
-                 project_id = default_project_id
-
-         self.client = storage.Client(project=project_id, credentials=credentials)
-         self.bucket = self.client.bucket(bucket_name)
-         logger.info("GCSStorageClient initialized")
-
-     def sync_get_read_url(self, object_key: str) -> str:
-         return self.bucket.blob(object_key).generate_signed_url(
-             version="v4", expiration=storage_expiry_time, method="GET"
-         )
-
-     async def get_read_url(self, object_key: str) -> str:
-         return await make_async(self.sync_get_read_url)(object_key)
-
-     def sync_upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-     ) -> Dict[str, Any]:
-         try:
-             blob = self.bucket.blob(object_key)
-
-             if not overwrite and blob.exists():
-                 raise Exception(
-                     f"File {object_key} already exists and overwrite is False"
-                 )
-
-             if isinstance(data, str):
-                 data = data.encode("utf-8")
-
-             blob.upload_from_string(data, content_type=mime)
-
-             # Return signed URL
-             return {
-                 "object_key": object_key,
-                 "url": self.sync_get_read_url(object_key),
-             }
-
-         except Exception as e:
-             raise Exception(f"Failed to upload file to GCS: {e!s}")
-
-     async def upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         return await make_async(self.sync_upload_file)(
-             object_key, data, mime, overwrite
-         )
-
-     def sync_delete_file(self, object_key: str) -> bool:
-         try:
-             self.bucket.blob(object_key).delete()
-             return True
-         except Exception as e:
-             logger.warning(f"GCSStorageClient, delete_file error: {e}")
-             return False
-
-     async def delete_file(self, object_key: str) -> bool:
-         return await make_async(self.sync_delete_file)(object_key)
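The GCS client above wraps its blocking google-cloud-storage calls with chainlit's make_async helper before awaiting them. As a rough, dependency-free sketch of the same offload pattern, the snippet below uses the standard library's asyncio.to_thread as a stand-in (this is not chainlit's implementation), and blocking_signed_url is a placeholder for a blocking SDK call such as generate_signed_url.

import asyncio
import functools
from typing import Any, Callable


def make_async_sketch(func: Callable[..., Any]) -> Callable[..., Any]:
    # Return an awaitable wrapper that runs the blocking callable in a worker
    # thread, keeping the event loop responsive while the SDK call blocks.
    @functools.wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        return await asyncio.to_thread(func, *args, **kwargs)

    return wrapper


def blocking_signed_url(object_key: str) -> str:
    # Placeholder for a blocking call like blob.generate_signed_url(...).
    return f"https://storage.example/{object_key}?signature=..."


async def main() -> None:
    url = await make_async_sketch(blocking_signed_url)("threads/123/image.png")
    print(url)


asyncio.run(main())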
chainlit/data/storage_clients/s3.py DELETED
@@ -1,88 +0,0 @@
- import os
- from typing import Any, Dict, Union
-
- import boto3  # type: ignore
-
- from chainlit import make_async
- from chainlit.data.storage_clients.base import BaseStorageClient, storage_expiry_time
- from chainlit.logger import logger
-
-
- class S3StorageClient(BaseStorageClient):
-     """
-     Class to enable Amazon S3 storage provider
-     """
-
-     def __init__(self, bucket: str, **kwargs: Any):
-         try:
-             self.bucket = bucket
-             self.client = boto3.client("s3", **kwargs)
-             logger.info("S3StorageClient initialized")
-         except Exception as e:
-             logger.warning(f"S3StorageClient initialization error: {e}")
-
-     def sync_get_read_url(self, object_key: str) -> str:
-         try:
-             url = self.client.generate_presigned_url(
-                 "get_object",
-                 Params={"Bucket": self.bucket, "Key": object_key},
-                 ExpiresIn=storage_expiry_time,
-             )
-             return url
-         except Exception as e:
-             logger.warning(f"S3StorageClient, get_read_url error: {e}")
-             return object_key
-
-     async def get_read_url(self, object_key: str) -> str:
-         return await make_async(self.sync_get_read_url)(object_key)
-
-     def sync_upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         try:
-             if content_disposition is not None:
-                 self.client.put_object(
-                     Bucket=self.bucket,
-                     Key=object_key,
-                     Body=data,
-                     ContentType=mime,
-                     ContentDisposition=content_disposition,
-                 )
-             else:
-                 self.client.put_object(
-                     Bucket=self.bucket, Key=object_key, Body=data, ContentType=mime
-                 )
-             endpoint = os.environ.get("DEV_AWS_ENDPOINT", "amazonaws.com")
-             url = f"https://{self.bucket}.s3.{endpoint}/{object_key}"
-             return {"object_key": object_key, "url": url}
-         except Exception as e:
-             logger.warning(f"S3StorageClient, upload_file error: {e}")
-             return {}
-
-     async def upload_file(
-         self,
-         object_key: str,
-         data: Union[bytes, str],
-         mime: str = "application/octet-stream",
-         overwrite: bool = True,
-         content_disposition: str | None = None,
-     ) -> Dict[str, Any]:
-         return await make_async(self.sync_upload_file)(
-             object_key, data, mime, overwrite, content_disposition
-         )
-
-     def sync_delete_file(self, object_key: str) -> bool:
-         try:
-             self.client.delete_object(Bucket=self.bucket, Key=object_key)
-             return True
-         except Exception as e:
-             logger.warning(f"S3StorageClient, delete_file error: {e}")
-             return False
-
-     async def delete_file(self, object_key: str) -> bool:
-         return await make_async(self.sync_delete_file)(object_key)
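A minimal usage sketch of the S3 client removed above (not part of the diff): the bucket name, region, and object key are placeholders, AWS credentials are assumed to be available through the usual boto3 mechanisms, and the import path is the 2.7.0 layout shown in the hunk.

import asyncio

from chainlit.data.storage_clients.s3 import S3StorageClient  # 2.7.0 layout


async def main() -> None:
    # Extra kwargs are forwarded to boto3.client("s3", ...); bucket is a placeholder.
    client = S3StorageClient(bucket="my-chainlit-files", region_name="us-east-1")
    await client.upload_file(
        object_key="threads/123/notes.txt",
        data="hello from chainlit",
        mime="text/plain",
    )
    # Presigned GET URL, valid for STORAGE_EXPIRY_TIME seconds (default 3600).
    print(await client.get_read_url("threads/123/notes.txt"))


asyncio.run(main())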
chainlit/data/utils.py DELETED
@@ -1,29 +0,0 @@
- import functools
- from collections import deque
-
- from chainlit.context import context
- from chainlit.session import WebsocketSession
-
-
- def queue_until_user_message():
-     def decorator(method):
-         @functools.wraps(method)
-         async def wrapper(self, *args, **kwargs):
-             if (
-                 isinstance(context.session, WebsocketSession)
-                 and not context.session.has_first_interaction
-             ):
-                 # Queue the method invocation waiting for the first user message
-                 queues = context.session.thread_queues
-                 method_name = method.__name__
-                 if method_name not in queues:
-                     queues[method_name] = deque()
-                 queues[method_name].append((method, self, args, kwargs))
-
-             else:
-                 # Otherwise, Execute the method immediately
-                 return await method(self, *args, **kwargs)
-
-         return wrapper
-
-     return decorator
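The decorator above defers method calls until the session has seen its first user message. Below is a self-contained sketch of the same "queue until ready" idea using a toy session object instead of Chainlit's context and WebsocketSession; all names here (ToySession, queue_until_ready, Message) are illustrative only.

import asyncio
import functools
from collections import deque


class ToySession:
    def __init__(self) -> None:
        self.has_first_interaction = False
        self.thread_queues: dict[str, deque] = {}


session = ToySession()


def queue_until_ready():
    def decorator(method):
        @functools.wraps(method)
        async def wrapper(self, *args, **kwargs):
            if not session.has_first_interaction:
                # Defer: remember the call so it can be flushed later.
                session.thread_queues.setdefault(method.__name__, deque()).append(
                    (method, self, args, kwargs)
                )
            else:
                return await method(self, *args, **kwargs)

        return wrapper

    return decorator


class Message:
    @queue_until_ready()
    async def send(self, text: str) -> None:
        print(f"sent: {text}")


async def main() -> None:
    msg = Message()
    await msg.send("queued")  # deferred; nothing is printed yet
    session.has_first_interaction = True
    # Flush queued calls, mirroring what happens once the first user message arrives.
    for method, self_, args, kwargs in session.thread_queues.get("send", []):
        await method(self_, *args, **kwargs)
    await msg.send("immediate")  # executes right away now


asyncio.run(main())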
chainlit/discord/__init__.py DELETED
@@ -1,6 +0,0 @@
- import importlib.util
-
- if importlib.util.find_spec("discord") is None:
-     raise ValueError(
-         "The discord package is required to integrate Chainlit with a Discord app. Run `pip install discord --upgrade`"
-     )