letschatty-0.4.337-py3-none-any.whl → letschatty-0.4.338-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. letschatty/models/ai_microservices/__init__.py +3 -3
  2. letschatty/models/ai_microservices/expected_output.py +2 -29
  3. letschatty/models/ai_microservices/lambda_events.py +39 -135
  4. letschatty/models/ai_microservices/lambda_invokation_types.py +1 -3
  5. letschatty/models/ai_microservices/n8n_ai_agents_payload.py +1 -3
  6. letschatty/models/analytics/events/__init__.py +2 -3
  7. letschatty/models/analytics/events/chat_based_events/chat_funnel.py +69 -13
  8. letschatty/models/analytics/events/company_based_events/asset_events.py +9 -2
  9. letschatty/models/analytics/events/event_type_to_classes.py +6 -3
  10. letschatty/models/analytics/events/event_types.py +9 -50
  11. letschatty/models/chat/chat.py +2 -0
  12. letschatty/models/chat/chat_with_assets.py +6 -1
  13. letschatty/models/chat/client.py +0 -2
  14. letschatty/models/chat/continuous_conversation.py +1 -1
  15. letschatty/models/company/CRM/funnel.py +365 -33
  16. letschatty/models/company/__init__.py +2 -1
  17. letschatty/models/company/assets/ai_agents_v2/ai_agents_decision_output.py +1 -1
  18. letschatty/models/company/assets/ai_agents_v2/chain_of_thought_in_chat.py +3 -5
  19. letschatty/models/company/assets/ai_agents_v2/chatty_ai_agent_in_chat.py +2 -37
  20. letschatty/models/company/assets/ai_agents_v2/chatty_ai_mode.py +2 -2
  21. letschatty/models/company/assets/ai_agents_v2/get_chat_with_prompt_response.py +0 -1
  22. letschatty/models/company/assets/ai_agents_v2/pre_qualify_config.py +2 -14
  23. letschatty/models/company/assets/ai_agents_v2/statuses.py +33 -0
  24. letschatty/models/company/assets/automation.py +10 -19
  25. letschatty/models/company/assets/chat_assets.py +9 -0
  26. letschatty/models/company/assets/company_assets.py +2 -0
  27. letschatty/models/company/company_shopify_integration.py +10 -0
  28. letschatty/models/company/form_field.py +9 -2
  29. letschatty/models/data_base/collection_interface.py +29 -101
  30. letschatty/models/data_base/mongo_connection.py +9 -92
  31. letschatty/models/messages/chatty_messages/schema/chatty_content/content_document.py +4 -2
  32. letschatty/models/messages/chatty_messages/schema/chatty_content/content_media.py +4 -3
  33. letschatty/models/utils/custom_exceptions/custom_exceptions.py +1 -14
  34. letschatty/services/ai_agents/smart_follow_up_context_builder_v2.py +2 -5
  35. letschatty/services/chat/chat_service.py +8 -1
  36. letschatty/services/chatty_assets/__init__.py +0 -12
  37. letschatty/services/chatty_assets/asset_service.py +13 -190
  38. letschatty/services/chatty_assets/base_container.py +2 -3
  39. letschatty/services/chatty_assets/base_container_with_collection.py +26 -35
  40. letschatty/services/continuous_conversation_service/continuous_conversation_helper.py +0 -11
  41. letschatty/services/events/events_manager.py +1 -218
  42. letschatty/services/factories/analytics/events_factory.py +6 -66
  43. letschatty/services/factories/lambda_ai_orchestrartor/lambda_events_factory.py +8 -25
  44. letschatty/services/messages_helpers/get_caption_or_body_or_preview.py +4 -6
  45. {letschatty-0.4.337.dist-info → letschatty-0.4.338.dist-info}/METADATA +1 -1
  46. {letschatty-0.4.337.dist-info → letschatty-0.4.338.dist-info}/RECORD +48 -82
  47. letschatty/models/analytics/events/chat_based_events/ai_agent_execution_event.py +0 -71
  48. letschatty/services/chatty_assets/assets_collections.py +0 -137
  49. letschatty/services/chatty_assets/collections/__init__.py +0 -38
  50. letschatty/services/chatty_assets/collections/ai_agent_collection.py +0 -19
  51. letschatty/services/chatty_assets/collections/ai_agent_in_chat_collection.py +0 -32
  52. letschatty/services/chatty_assets/collections/ai_component_collection.py +0 -21
  53. letschatty/services/chatty_assets/collections/chain_of_thought_collection.py +0 -30
  54. letschatty/services/chatty_assets/collections/chat_collection.py +0 -21
  55. letschatty/services/chatty_assets/collections/contact_point_collection.py +0 -21
  56. letschatty/services/chatty_assets/collections/fast_answer_collection.py +0 -21
  57. letschatty/services/chatty_assets/collections/filter_criteria_collection.py +0 -18
  58. letschatty/services/chatty_assets/collections/flow_collection.py +0 -20
  59. letschatty/services/chatty_assets/collections/product_collection.py +0 -20
  60. letschatty/services/chatty_assets/collections/sale_collection.py +0 -20
  61. letschatty/services/chatty_assets/collections/source_collection.py +0 -21
  62. letschatty/services/chatty_assets/collections/tag_collection.py +0 -19
  63. letschatty/services/chatty_assets/collections/topic_collection.py +0 -21
  64. letschatty/services/chatty_assets/collections/user_collection.py +0 -20
  65. letschatty/services/chatty_assets/example_usage.py +0 -44
  66. letschatty/services/chatty_assets/services/__init__.py +0 -37
  67. letschatty/services/chatty_assets/services/ai_agent_in_chat_service.py +0 -73
  68. letschatty/services/chatty_assets/services/ai_agent_service.py +0 -23
  69. letschatty/services/chatty_assets/services/chain_of_thought_service.py +0 -70
  70. letschatty/services/chatty_assets/services/chat_service.py +0 -25
  71. letschatty/services/chatty_assets/services/contact_point_service.py +0 -29
  72. letschatty/services/chatty_assets/services/fast_answer_service.py +0 -32
  73. letschatty/services/chatty_assets/services/filter_criteria_service.py +0 -30
  74. letschatty/services/chatty_assets/services/flow_service.py +0 -25
  75. letschatty/services/chatty_assets/services/product_service.py +0 -30
  76. letschatty/services/chatty_assets/services/sale_service.py +0 -25
  77. letschatty/services/chatty_assets/services/source_service.py +0 -28
  78. letschatty/services/chatty_assets/services/tag_service.py +0 -32
  79. letschatty/services/chatty_assets/services/topic_service.py +0 -31
  80. letschatty/services/chatty_assets/services/user_service.py +0 -32
  81. letschatty/services/events/__init__.py +0 -6
  82. letschatty/services/factories/analytics/ai_agent_event_factory.py +0 -161
  83. {letschatty-0.4.337.dist-info → letschatty-0.4.338.dist-info}/LICENSE +0 -0
  84. {letschatty-0.4.337.dist-info → letschatty-0.4.338.dist-info}/WHEEL +0 -0
letschatty/services/chatty_assets/asset_service.py

@@ -1,22 +1,11 @@
 from __future__ import annotations
-from typing import TypeVar, Generic, Type, Callable, Protocol, Optional, ClassVar, TYPE_CHECKING, List
-
-from bson import ObjectId
-from letschatty.models.utils.types import StrObjectId
+from typing import TypeVar, Generic, Type, Callable, Protocol, Optional
 from .base_container_with_collection import ChattyAssetCollectionInterface, ChattyAssetContainerWithCollection, CacheConfig
 from ...models.base_models import ChattyAssetModel
 from ...models.base_models.chatty_asset_model import ChattyAssetPreview
 from ...models.data_base.mongo_connection import MongoConnection
 import logging
 import os
-
-if TYPE_CHECKING:
-    from ...models.analytics.events.base import EventType
-    from ...models.company.empresa import EmpresaModel
-    from ...models.execution.execution import ExecutionContext
-    from ...models.company.assets.company_assets import CompanyAssetType
-    from ...models.utils.types.deletion_type import DeletionType
-
 logger = logging.getLogger("AssetService")
 
 # Protocol for assets that specify their preview type
@@ -55,75 +44,17 @@ class AssetCollection(Generic[T, P], ChattyAssetCollectionInterface[T, P]):
             raise ValueError(f"Data must be a dictionary, got {type(data)}: {data}")
         return self._create_instance_method(data)
 
-
 class AssetService(Generic[T, P], ChattyAssetContainerWithCollection[T, P]):
-    """
-    Generic service for handling CRUD operations for any Chatty asset.
-
-    Supports optional automatic event handling for API implementations.
-    Set these class attributes to enable events:
-    - asset_type_enum: CompanyAssetType (e.g., CompanyAssetType.PRODUCTS)
-    - event_type_created: EventType (e.g., EventType.PRODUCT_CREATED)
-    - event_type_updated: EventType (e.g., EventType.PRODUCT_UPDATED)
-    - event_type_deleted: EventType (e.g., EventType.PRODUCT_DELETED)
-    """
-
-    # Optional: Set these in subclasses to enable automatic event handling
-    asset_type_enum: ClassVar[Optional['CompanyAssetType']] = None
-    event_type_created: ClassVar[Optional['EventType']] = None
-    event_type_updated: ClassVar[Optional['EventType']] = None
-    event_type_deleted: ClassVar[Optional['EventType']] = None
-
-    collection: AssetCollection[T, P] # Type annotation for better type checking
+    """Generic service for handling CRUD operations for any Chatty asset"""
 
     def __init__(self,
-                 collection: AssetCollection[T, P],
+                 collection_name: str,
+                 asset_type: Type[T],
+                 connection: MongoConnection,
+                 create_instance_method: Callable[[dict], T],
+                 preview_type: Optional[Type[P]] = None,
                  cache_config: CacheConfig = CacheConfig.default()):
-        """
-        Initialize AssetService with a pre-configured collection.
-
-        The item_type and preview_type are automatically extracted from the collection,
-        eliminating redundancy and simplifying the API.
-
-        Args:
-            collection: Pre-configured AssetCollection subclass
-            cache_config: Cache configuration
-        """
-        logger.debug(f"AssetService {self.__class__.__name__} initializing with collection")
-        super().__init__(
-            item_type=collection.type,
-            preview_type=collection.preview_type,
-            collection=collection,
-            cache_config=cache_config,
-        )
-        logger.debug(f"AssetService {self.__class__.__name__} initialized")
-
-    @classmethod
-    def from_config(cls,
-                    collection_name: str,
-                    asset_type: Type[T],
-                    connection: MongoConnection,
-                    create_instance_method: Callable[[dict], T],
-                    preview_type: Optional[Type[P]] = None,
-                    cache_config: CacheConfig = CacheConfig.default()) -> 'AssetService[T, P]':
-        """
-        Create an AssetService using the legacy configuration pattern.
-
-        This class method is provided for backward compatibility.
-        New code should use pre-configured AssetCollection subclasses.
-
-        Args:
-            collection_name: MongoDB collection name
-            asset_type: The asset model type
-            connection: MongoDB connection
-            create_instance_method: Factory method to create asset instances
-            preview_type: Optional preview type
-            cache_config: Cache configuration
-
-        Returns:
-            AssetService instance
-        """
-        logger.debug(f"AssetService creating from config for {collection_name}")
+        logger.debug(f"AssetService {self.__class__.__name__} initializing for {collection_name}")
         asset_collection = AssetCollection(
             collection=collection_name,
             asset_type=asset_type,
@@ -131,102 +62,13 @@ class AssetService(Generic[T, P], ChattyAssetContainerWithCollection[T, P]):
             create_instance_method=create_instance_method,
             preview_type=preview_type
         )
-        return cls(
+        super().__init__(
+            item_type=asset_type,
+            preview_type=preview_type,
            collection=asset_collection,
-            cache_config=cache_config
+            cache_config=cache_config,
        )
-
-    def _should_handle_events(self) -> bool:
-        """Check if this service should handle events automatically"""
-        return (self.asset_type_enum is not None and
-                self.event_type_created is not None and
-                self.event_type_updated is not None and
-                self.event_type_deleted is not None)
-
-    def _queue_event(self, item: T, event_type: 'EventType', execution_context: 'ExecutionContext', company_info: 'EmpresaModel'):
-        """Queue an event for this asset if event handling is enabled"""
-        if not self._should_handle_events() or not self.asset_type_enum:
-            return
-
-        try:
-            from ...services.factories.analytics.events_factory import EventFactory
-            from ...services.events import events_manager
-
-            # Type guard - company_id should exist on ChattyAssetModel
-            if not hasattr(item, 'company_id'):
-                logger.warning(f"Asset {type(item).__name__} missing company_id, skipping event")
-                return
-
-            events = EventFactory.asset_events(
-                company_id=item.company_id, # type: ignore[attr-defined]
-                executor_id=execution_context.executor.id,
-                asset=item,
-                asset_type=self.asset_type_enum,
-                event_type=event_type,
-                time=execution_context.time,
-                trace_id=execution_context.trace_id,
-                executor_type=execution_context.executor.type,
-                company_info=company_info
-            )
-            events_manager.queue_events(events)
-        except ImportError:
-            # Events not available (microservice context) - skip
-            pass
-
-    # All methods are now async-only for better performance
-    async def insert(self, item: T, execution_context: 'ExecutionContext', company_info: Optional['EmpresaModel'] = None) -> T:
-        """Insert with automatic event handling if configured"""
-        result = await super().insert(item, execution_context)
-        if company_info and self._should_handle_events() and self.event_type_created:
-            self._queue_event(result, self.event_type_created, execution_context, company_info)
-        return result
-
-    async def update(self, id: str, new_item: T, execution_context: 'ExecutionContext', company_info: Optional['EmpresaModel'] = None) -> T:
-        """Update with automatic event handling if configured"""
-        result = await super().update(id, new_item, execution_context)
-        if company_info and self._should_handle_events() and self.event_type_updated:
-            self._queue_event(result, self.event_type_updated, execution_context, company_info)
-        return result
-
-    async def delete(self, id: str, execution_context: 'ExecutionContext', company_info: Optional['EmpresaModel'] = None, deletion_type: Optional['DeletionType'] = None) -> T:
-        """Delete with automatic event handling if configured"""
-        from ...models.utils.types.deletion_type import DeletionType as DT
-        result = await super().delete(id, execution_context, deletion_type or DT.LOGICAL)
-        if company_info and self._should_handle_events() and self.event_type_deleted:
-            self._queue_event(result, self.event_type_deleted, execution_context, company_info)
-        return result
-
-    async def restore(self, id: str, execution_context: 'ExecutionContext', company_info: Optional['EmpresaModel'] = None) -> T:
-        """Restore with automatic event handling if configured"""
-        result = await super().restore(id, execution_context)
-        if company_info and self._should_handle_events() and self.event_type_updated:
-            self._queue_event(result, self.event_type_updated, execution_context, company_info)
-        return result
-
-    # Generic convenience methods
-    async def create_asset(self, data: dict, execution_context: 'ExecutionContext', company_info: 'EmpresaModel') -> T:
-        """
-        Generic create method - creates instance from dict and inserts with events.
-        Can be called as create_asset or aliased to create_product/create_tag/etc.
-        """
-        data["company_id"] = execution_context.company_id
-        item = self.collection.create_instance(data)
-        return await self.insert(item, execution_context, company_info)
-
-    async def update_asset(self, id: str, data: dict, execution_context: 'ExecutionContext', company_info: 'EmpresaModel') -> T:
-        """
-        Generic update method - creates instance from dict and updates with events.
-        Can be called as update_asset or aliased to update_product/update_tag/etc.
-        """
-        new_item = self.collection.create_instance(data)
-        return await self.update(id, new_item, execution_context, company_info)
-
-    async def delete_asset(self, id: str, execution_context: 'ExecutionContext', company_info: 'EmpresaModel') -> T:
-        """
-        Generic delete method - deletes with events.
-        Can be called as delete_asset or aliased to delete_product/delete_tag/etc.
-        """
-        return await self.delete(id, execution_context, company_info)
+        logger.debug(f"AssetService {self.__class__.__name__} initialized for {collection_name}")
 
     def get_preview_type(self) -> Type[P]:
         """Get the preview type from the asset class if it has one"""
@@ -239,22 +81,3 @@ class AssetService(Generic[T, P], ChattyAssetContainerWithCollection[T, P]):
         preview_type = self.get_preview_type()
         return super().get_preview_by_id(id, company_id, preview_type)
 
-    # Additional async read methods (passthrough to base class)
-    async def get_by_id(self, id: str) -> T:
-        """Get by ID"""
-        return await super().get_by_id(id)
-
-    async def get_all(self, company_id: str) -> List[T]:
-        """Get all for company"""
-        return await super().get_all(company_id)
-
-    async def get_by_query(self, query: dict, company_id: Optional[str]) -> List[T]:
-        """Get by query"""
-        return await super().get_by_query(query, company_id)
-
-    async def get_item_dumped(self, id: str) -> dict:
-        """Get item by ID and return as JSON serialized dict for frontend"""
-        from ...models.utils.types.serializer_type import SerializerType
-        item = await self.get_by_id(id)
-        return item.model_dump_json(serializer=SerializerType.FRONTEND)
-
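Net effect of the asset_service.py hunks: the 0.4.337 constructor that took a pre-configured AssetCollection, the from_config() classmethod, the event hooks (asset_type_enum / event_type_*), and the async passthrough methods are all gone; in 0.4.338 the service is built directly from collection configuration again. A minimal instantiation sketch against the new signature follows. The import paths are inferred from the relative imports in the diff, and Product, its fields, and the MongoConnection setup are hypothetical stand-ins, not part of the package.

# Sketch only (assumptions noted above): building an AssetService with the
# 0.4.338 constructor signature shown in the diff.
from letschatty.models.base_models import ChattyAssetModel
from letschatty.models.data_base.mongo_connection import MongoConnection
from letschatty.services.chatty_assets.asset_service import AssetService
from letschatty.services.chatty_assets.base_container_with_collection import CacheConfig


class Product(ChattyAssetModel):  # hypothetical asset model, for illustration only
    name: str = ""


connection = MongoConnection()  # assumed: real connection settings come from your env/config

product_service = AssetService(
    collection_name="products",                       # MongoDB collection name
    asset_type=Product,
    connection=connection,
    create_instance_method=lambda data: Product(**data),
    preview_type=None,                                # or a ChattyAssetPreview subclass
    cache_config=CacheConfig.default(),
)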
letschatty/services/chatty_assets/base_container.py

@@ -123,9 +123,8 @@ class ChattyAssetBaseContainer(Generic[T, P], ABC):
         else:
             return list(self.items.values())
 
-    async def get_all_dict_id_item(self, company_id:Optional[StrObjectId]) -> Dict[StrObjectId, T]:
-        items = await self.get_all(company_id)
-        return {item.id: item for item in items}
+    def get_all_dict_id_item(self, company_id:Optional[StrObjectId]) -> Dict[StrObjectId, T]:
+        return {item.id: item for item in self.get_all(company_id)}
 
     def get_all_previews(self, company_id:Optional[StrObjectId]) -> List[P]:
         logger.debug(f"Getting all previews for {self.__class__.__name__}")
letschatty/services/chatty_assets/base_container_with_collection.py

@@ -64,8 +64,7 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         self.update_previews_thread()
         self.load_from_db_thread(company_id=None)
 
-    # All methods are now async-only for better performance
-    async def insert(self, item: T, execution_context: ExecutionContext) -> T:
+    def insert(self, item: T, execution_context: ExecutionContext) -> T:
         """
         Add an item to the container and insert it into the database collection.
 
@@ -78,12 +77,12 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         """
         logger.debug(f"{self.__class__.__name__} inserting item {item}")
         inserted_item = super().insert(item)
-        await self.collection.insert(inserted_item)
+        self.collection.insert(inserted_item)
         execution_context.set_event_time(inserted_item.created_at)
         self.update_previews_thread()
         return inserted_item
 
-    async def update(self, id: str, new_item: T, execution_context: ExecutionContext) -> T:
+    def update(self, id: str, new_item: T, execution_context: ExecutionContext) -> T:
         """
         Update an item in the container and in the database collection.
 
@@ -106,18 +105,18 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
             if id != updated_item.id:
                 logger.error(f"Item id {id} does not match updated item id {updated_item.id}")
                 raise ValueError(f"Item id {id} does not match updated item id {updated_item.id}")
-            await self.collection.update(updated_item)
+            self.collection.update(updated_item)
             execution_context.set_event_time(updated_item.updated_at)
             self.update_preview(updated_item)
             self.update_previews_thread()
             return updated_item
 
         except NotFoundError as e:
-            outdated_item = await self.collection.get_by_id(id)
+            outdated_item = self.collection.get_by_id(id)
             if outdated_item:
                 updated_item = outdated_item.update(new_item)
                 self.items[id] = updated_item
-                await self.collection.update(updated_item)
+                self.collection.update(updated_item)
                 execution_context.set_event_time(updated_item.updated_at)
                 self.update_previews_thread()
                 return updated_item
@@ -126,7 +125,7 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
                     f"Item with id {id} not found in {self.__class__.__name__} nor in collection DB"
                 )
 
-    async def delete(self, id: str, execution_context: ExecutionContext,deletion_type : DeletionType = DeletionType.LOGICAL) -> T:
+    def delete(self, id: str, execution_context: ExecutionContext,deletion_type : DeletionType = DeletionType.LOGICAL) -> T:
         """
         Delete an item from the container and the collection.
 
@@ -143,16 +142,16 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
             deleted_item = super().delete(id)
             self.delete_preview(id)
             execution_context.set_event_time(datetime.now(ZoneInfo("UTC")))
-            await self.collection.delete(id, deletion_type)
+            self.collection.delete(id, deletion_type)
             return deleted_item
         except NotFoundError as e:
-            await self.collection.delete(id, deletion_type)
+            self.collection.delete(id, deletion_type)
             self.delete_preview(id)
             self.update_previews_thread()
             execution_context.set_event_time(datetime.now(ZoneInfo("UTC")))
-            return await self.collection.get_by_id(id)
+            return self.collection.get_by_id(id)
 
-    async def get_by_id(self, id: str) -> T:
+    def get_by_id(self, id: str) -> T:
         """
         Get an item from the container.
 
@@ -175,7 +174,7 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
            # #if they are supposed to be in memory, we raise an error since it shouldn't be in the collection DB
            # raise NotFoundError(f"Item with id {id} not found in {self.__class__.__name__} nor in collection DB")
            logger.debug(f"{self.__class__.__name__} getting item {id} not found in container, trying to get from collection")
-           item = await self.collection.get_by_id(id)
+           item = self.collection.get_by_id(id)
            if item:
                if item.deleted_at is not None:
                    return item
@@ -240,7 +239,7 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         logger.debug(f"Clearing previews cache of {self.__class__.__name__}")
         self.set_preview_items([])
 
-    async def get_all(self, company_id: Optional[StrObjectId]) -> List[T]:
+    def get_all(self, company_id: Optional[StrObjectId]) -> List[T]:
         # Get items from memory
         logger.debug(f"{self.__class__.__name__} getting all items from memory and collection")
         memory_items = super().get_all(company_id=company_id)
@@ -248,7 +247,7 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         memory_ids = [ObjectId(item.id) for item in memory_items]
         # Build the query for collection items
         query = {"deleted_at": None, "_id": {"$nin": memory_ids}}
-        collection_items = await self.collection.get_docs(query=query, company_id=company_id)
+        collection_items = self.collection.get_docs(query=query, company_id=company_id)
         all_items = memory_items + collection_items
         return sorted(all_items, key=lambda x: x.created_at, reverse=True)
 
@@ -262,8 +261,6 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
     def update_previews_thread(self):
         """We start a thread to update the previews cache so it doesn't block the main thread"""
         # self.update_previews_cache()
-        if not self.cache_config.keep_previews_always_in_memory:
-            return
         thread = threading.Thread(target=self.update_previews_cache)
         thread.start()
 
@@ -278,13 +275,13 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         self.set_preview_items(collection_items)
         return collection_items
 
-    async def get_by_query(self, query: dict, company_id: Optional[StrObjectId]) -> List[T]:
+    def get_by_query(self, query: dict, company_id: Optional[StrObjectId]) -> List[T]:
         logger.debug(f"{self.__class__.__name__} getting items by query {query} from collection")
-        return await self.collection.get_docs(query=query, company_id=company_id)
+        return self.collection.get_docs(query=query, company_id=company_id)
 
-    async def get_deleted(self, company_id: Optional[StrObjectId]) -> List[T]:
+    def get_deleted(self, company_id: Optional[StrObjectId]) -> List[T]:
         logger.debug(f"{self.__class__.__name__} getting deleted items from collection")
-        return await self.collection.get_docs(query={"deleted_at": {"$ne": None}}, company_id=company_id)
+        return self.collection.get_docs(query={"deleted_at": {"$ne": None}}, company_id=company_id)
 
     def load_from_db_thread(self, company_id: Optional[StrObjectId]):
         """We start a thread to load the items from the database so it doesn't block the main thread"""
@@ -295,27 +292,21 @@ class ChattyAssetContainerWithCollection(ChattyAssetBaseContainer[T, P], ABC):
         thread.start()
 
     def load_from_db(self, company_id: Optional[StrObjectId]):
-        """Pass company_id=None to load all items from the database. Uses sync client for background loading."""
+        """Pass company_id=None to load all items from the database."""
         logger.debug(f"{self.__class__.__name__} loading items from collection")
-        # Background loading uses sync client (less critical, runs in thread)
-        query: Dict[str, Any] = {"deleted_at": None}
-        if company_id:
-            query["company_id"] = company_id
-        docs = list(self.collection.collection.find(filter=query))
-        # Create instances once and reuse
-        loaded_items = [self.collection.create_instance(doc) for doc in docs]
-        self.items = {item.id: item for item in loaded_items}
-
-    async def restore(self, id: str, execution_context: ExecutionContext) -> T:
+        # self.items = {item.id: item for item in self.collection.get_docs(query={}, company_id=company_id)}
+        self.items = {item.id: item for item in self.collection.get_docs(query={"deleted_at": None}, company_id=company_id)}
+
+    def restore(self, id: str, execution_context: ExecutionContext) -> T:
         logger.debug(f"{self.__class__.__name__} restoring item {id} with execution context {execution_context}")
         if id in self.items:
             raise ValueError(f"Item with id {id} already exists in {self.__class__.__name__}")
-        restored_item = await self.collection.get_by_id(id)
+        restored_item = self.collection.get_by_id(id)
         if restored_item is None:
            raise NotFoundError(f"Item with id {id} not found in collection DB")
        restored_item.deleted_at = None
        restored_item.update_now()
        execution_context.set_event_time(restored_item.updated_at)
        self.items[id] = restored_item
-        await self.collection.update(restored_item)
-        return restored_item
+        self.collection.update(restored_item)
+        return restored_item
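The base_container.py and base_container_with_collection.py hunks revert the container layer from async back to sync: insert, update, delete, restore, get_by_id, get_all, get_all_dict_id_item, get_by_query and get_deleted no longer return coroutines, and load_from_db reads through collection.get_docs again. Call sites that awaited these methods against 0.4.337 must drop the await, roughly as sketched below, continuing from the product_service sketch after the asset_service.py hunks; product_service, execution_context, new_product and company_id are assumed names, and the extra company_info/event arguments from 0.4.337 are gone.

# Call-site sketch for the async -> sync revert; only the method signatures
# come from the diff, the surrounding names are assumptions.

# 0.4.337 (async container methods, optional event handling):
#     item = await product_service.insert(new_product, execution_context, company_info)
#     items = await product_service.get_all(company_id)

# 0.4.338 (sync again):
item = product_service.insert(new_product, execution_context)
items = product_service.get_all(company_id)
deleted_items = product_service.get_deleted(company_id)
items_by_id = product_service.get_all_dict_id_item(company_id)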
letschatty/services/continuous_conversation_service/continuous_conversation_helper.py

@@ -269,15 +269,4 @@ class ContinuousConversationHelper:
         central_notif_content = ChattyContentCentral(body=body, status=CentralNotificationStatus.WARNING, calls_to_action=[cta.value for cta in cc.calls_to_action])
         central_notif = CentralNotificationFactory.continuous_conversation_status(cc=cc, content=central_notif_content)
         ChatService.add_central_notification(central_notification=central_notif, chat=chat)
-        return cc
-
-    @staticmethod
-    def handle_failed_template_cc(chat: Chat, cc: ContinuousConversation, error_details: str) -> ContinuousConversation:
-        """This is for the handling of a failed template CC"""
-        cc.set_status(status=ContinuousConversationStatus.FAILED)
-        body=f"Continuous conversation failed to be sent: {error_details}"
-        logger.debug(f"{body} | CC status: {cc.status} | CC id: {cc.id} | chat id: {chat.identifier}")
-        central_notif_content = ChattyContentCentral(body=body, status=CentralNotificationStatus.ERROR, calls_to_action=[cta.value for cta in cc.calls_to_action])
-        central_notif = CentralNotificationFactory.continuous_conversation_status(cc=cc, content=central_notif_content)
-        ChatService.add_central_notification(central_notification=central_notif, chat=chat)
         return cc
letschatty/services/events/events_manager.py

@@ -1,219 +1,2 @@
-"""
-Events Manager - Handles queuing and publishing events to EventBridge
+from letschatty.models.base_models.singleton import SingletonMeta
 
-This is a generic implementation that can be configured for different environments.
-"""
-from ...models.base_models.singleton import SingletonMeta
-from ...models.analytics.events.base import Event, EventType
-from typing import List, Optional, Callable
-import logging
-import boto3
-import queue
-import threading
-import time
-from datetime import datetime
-from zoneinfo import ZoneInfo
-import os
-import json
-
-logger = logging.getLogger("EventsManager")
-
-
-class EventsManager(metaclass=SingletonMeta):
-    """
-    Manages event queuing and publishing to AWS EventBridge.
-
-    Can be configured via environment variables or init parameters.
-    """
-
-    def __init__(self,
-                 event_bus_name: Optional[str] = None,
-                 source: Optional[str] = None,
-                 publish_events: Optional[bool] = None,
-                 failed_events_callback: Optional[Callable] = None):
-        """
-        Initialize EventsManager.
-
-        Args:
-            event_bus_name: AWS EventBridge event bus name (or uses env var)
-            source: Source identifier for events (or uses env var)
-            publish_events: Whether to publish events (or uses env var)
-            failed_events_callback: Optional callback for handling failed events
-        """
-        self.events_queue: queue.Queue[Event] = queue.Queue()
-        self.eventbridge_client = boto3.client('events', region_name='us-east-1')
-
-        # Configuration - prefer parameters, fall back to env vars
-        self.event_bus_name = event_bus_name or os.getenv('CHATTY_EVENT_BUS_NAME', 'chatty-events')
-        self.source = source or os.getenv('CHATTY_EVENT_SOURCE')
-        if not self.source:
-            raise ValueError("Source must be provided either as a parameter or through the CHATTY_EVENT_SOURCE environment variable.")
-        self.publish_events = publish_events if publish_events is not None else os.getenv('PUBLISH_EVENTS_TO_EVENTBRIDGE', 'true').lower() == 'true'
-
-        self.max_retries = 3
-        self.thread_lock = threading.Lock()
-        self.thread_running = False
-        self.max_thread_runtime = 300
-        self.failed_events_callback = failed_events_callback
-
-        logger.debug(f"EventsManager initialized: bus={self.event_bus_name}, source={self.source}, publish={self.publish_events}")
-
-    def queue_events(self, events: List[Event]):
-        """Queue events and spawn a thread to publish them if one isn't already running"""
-        if not self.publish_events:
-            logger.debug("Event publishing disabled, skipping")
-            return
-
-        for event in events:
-            logger.debug(f"Queueing event: {event.type.value} {event.company_id}")
-            logger.debug(f"Event: {event.model_dump_json()}")
-            self.events_queue.put(event)
-
-        logger.debug(f"Queued {len(events)} events")
-        if events:
-            logger.debug(f"1° event: {events[0].model_dump_json()}")
-
-        # Only start a new thread if one isn't already running
-        with self.thread_lock:
-            if not self.thread_running:
-                logger.debug("Starting publisher thread")
-                self.thread_running = True
-                thread = threading.Thread(
-                    target=self._process_queue,
-                    daemon=True,
-                    name="EventBridge-Publisher"
-                )
-                thread.start()
-                logger.debug("Started publisher thread")
-            else:
-                logger.debug("Publisher thread already running, using existing thread")
-
-    def _process_queue(self):
-        """Process all events in the queue and then terminate"""
-        try:
-            start_time = time.time()
-            while not self.events_queue.empty():
-                logger.debug("Processing queue")
-                events_batch = []
-                if time.time() - start_time > self.max_thread_runtime:
-                    logger.warning(f"Thread ran for more than {self.max_thread_runtime}s - terminating")
-                    break
-
-                # Collect up to 10 events (EventBridge limit)
-                for _ in range(10):
-                    try:
-                        event = self.events_queue.get(timeout=0.5)
-                        events_batch.append(event)
-                        self.events_queue.task_done()
-                    except queue.Empty:
-                        logger.debug("Queue is empty")
-                        break
-
-                # Publish this batch
-                if events_batch:
-                    self._publish_batch(events_batch)
-
-        except Exception as e:
-            logger.exception(f"Error in publisher thread: {str(e)}")
-
-        finally:
-            # Mark thread as completed
-            with self.thread_lock:
-                self.thread_running = False
-
-    def _publish_batch(self, events: List[Event]):
-        """Send a batch of events to EventBridge with retries"""
-        if not events:
-            return
-
-        entries = []
-        for event in events:
-            entry = {
-                'Source': self.source,
-                'DetailType': event.type.value,
-                'Detail': json.dumps(event.model_dump_json()),
-                'EventBusName': self.event_bus_name
-            }
-            logger.debug(f"Appending event: {event.type.value}")
-            entries.append(entry)
-
-        for retry in range(self.max_retries):
-            try:
-                logger.debug(f"Sending {len(entries)} events to EventBridge")
-                logger.debug(f"Entries: {entries}")
-                response = self.eventbridge_client.put_events(Entries=entries)
-                logger.debug(f"Response: {response}")
-
-                if response.get('FailedEntryCount', 0) == 0:
-                    logger.info(f"Successfully published {len(events)} events")
-                    return
-
-                # Handle partial failures
-                failed_entries: List[dict] = []
-                failed_events: List[Event] = []
-
-                for i, result in enumerate(response.get('Entries', [])):
-                    if 'ErrorCode' in result:
-                        failed_entries.append(entries[i])
-                        failed_events.append(events[i])
-                        logger.error(f"Failed to publish event: {events[i].type.value}")
-
-                if retry < self.max_retries - 1 and failed_entries:
-                    logger.info(f"Retrying {len(failed_entries)} events")
-                    entries = failed_entries
-                    events = failed_events
-                else:
-                    # Store failed events via callback if provided
-                    if self.failed_events_callback and failed_events:
-                        failed_events_with_errors = []
-                        for i, event in enumerate(failed_events):
-                            result = response.get('Entries', [])[i]
-                            failed_event_data = {
-                                "event": event.model_dump_json(),
-                                "error_code": result.get('ErrorCode'),
-                                "error_message": result.get('ErrorMessage'),
-                                "retry_count": self.max_retries,
-                                "timestamp": datetime.now(ZoneInfo("UTC"))
-                            }
-                            failed_events_with_errors.append(failed_event_data)
-
-                        try:
-                            self.failed_events_callback(failed_events_with_errors)
-                        except Exception as e:
-                            logger.error(f"Error calling failed_events_callback: {e}")
-
-                    logger.error(f"Gave up on {len(failed_entries)} events after {self.max_retries} attempts")
-                    return
-
-            except Exception as e:
-                if retry < self.max_retries - 1:
-                    logger.warning(f"Error publishing events (attempt {retry+1}/{self.max_retries}): {str(e)}")
-                    time.sleep(0.5 * (2 ** retry))  # Exponential backoff
-                else:
-                    logger.exception(f"Failed to publish events after {self.max_retries} attempts")
-                    return
-
-    def flush(self):
-        """Wait for all queued events to be processed"""
-        # If no thread is running but we have events, start one
-        with self.thread_lock:
-            if not self.thread_running and not self.events_queue.empty():
-                self.thread_running = True
-                thread = threading.Thread(
-                    target=self._process_queue,
-                    daemon=True,
-                    name="EventBridge-Publisher"
-                )
-                thread.start()
-
-        # Wait for queue to be empty
-        try:
-            self.events_queue.join()
-            return True
-        except Exception:
-            logger.warning("Error waiting for events queue to complete")
-            return False
-
-
-# Singleton instance
-events_manager = EventsManager()
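With events_manager.py reduced to a single SingletonMeta import, the EventsManager class and the module-level events_manager singleton are no longer part of the wheel, so nothing in 0.4.338 queues asset events to EventBridge. Integrations that depended on that behaviour have to publish events themselves; below is a minimal sketch of the same put_events batching pattern, based on the implementation removed above. The bus name, source, and detail payload shape are assumptions to adapt to your own setup.

# Minimal stand-in for the removed EventsManager (sketch, not a package API).
# Assumes pydantic-style events exposing .type.value and .model_dump_json(),
# as the removed code did; bus name and source are placeholders.
import boto3

_client = boto3.client("events", region_name="us-east-1")

def publish_events(events, bus_name="chatty-events", source="my-service"):
    """Publish events to EventBridge in batches of 10 (the put_events limit)."""
    entries = [
        {
            "Source": source,
            "DetailType": event.type.value,
            "Detail": event.model_dump_json(),  # the removed code wrapped this in json.dumps, double-encoding it
            "EventBusName": bus_name,
        }
        for event in events
    ]
    for i in range(0, len(entries), 10):
        response = _client.put_events(Entries=entries[i:i + 10])
        if response.get("FailedEntryCount", 0):
            # The removed _publish_batch retried failed entries with exponential backoff here.
            raise RuntimeError(f"{response['FailedEntryCount']} EventBridge entries failed")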