fusesell 1.2.3.tar.gz → 1.2.5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fusesell might be problematic.
- {fusesell-1.2.3 → fusesell-1.2.5}/CHANGELOG.md +9 -0
- {fusesell-1.2.3/fusesell.egg-info → fusesell-1.2.5}/PKG-INFO +4 -2
- {fusesell-1.2.3 → fusesell-1.2.5}/README.md +5 -3
- {fusesell-1.2.3 → fusesell-1.2.5/fusesell.egg-info}/PKG-INFO +4 -2
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/__init__.py +1 -1
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/pipeline.py +11 -5
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/follow_up.py +98 -22
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/initial_outreach.py +98 -29
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/data_manager.py +214 -28
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/event_scheduler.py +386 -76
- {fusesell-1.2.3 → fusesell-1.2.5}/pyproject.toml +1 -1
- {fusesell-1.2.3 → fusesell-1.2.5}/LICENSE +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/MANIFEST.in +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.egg-info/SOURCES.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.egg-info/dependency_links.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.egg-info/entry_points.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.egg-info/requires.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.egg-info/top_level.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/api.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/cli.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/config/__init__.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/config/prompts.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/config/settings.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/__init__.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/base_stage.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/data_acquisition.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/data_preparation.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/stages/lead_scoring.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/conftest.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/test_api.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/test_cli.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/test_data_manager_products.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/test_data_manager_sales_process.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/tests/test_data_manager_teams.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/__init__.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/birthday_email_manager.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/llm_client.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/logger.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/timezone_detector.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/fusesell_local/utils/validators.py +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/requirements.txt +0 -0
- {fusesell-1.2.3 → fusesell-1.2.5}/setup.cfg +0 -0
CHANGELOG.md
@@ -2,6 +2,15 @@
 
 All notable changes to FuseSell Local will be documented in this file.
 
+# [1.2.5] - 2025-10-24
+
+### Added
+- Local `reminder_task` table and scheduler plumbing so scheduled outreach mirrors the server flow and can be consumed by RealTimeX orchestration.
+- Initial outreach and follow-up stages now emit reminder metadata whenever emails are scheduled, including team/customer context.
+
+### Changed
+- Event scheduler returns reminder IDs alongside scheduled events while preserving immutable default prompts when layering team overrides.
+
 # [1.2.3] - 2025-10-21
 
 ### Added
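The reminder plumbing described in this changelog entry is wired through the stage and scheduler hunks later in this diff. As a quick orientation, here is a minimal sketch (not taken from the package) of how a caller might drive the new path: the `schedule_email_event` keyword arguments mirror the stage code shown below, the abbreviated `reminder_context` payload stands in for the full dict the stages build, and the `reminder_id` return key is an assumption based on the changelog wording.

```python
# Hypothetical driver for the new reminder-aware scheduling path (sketch only).
from fusesell_local.utils.event_scheduler import EventScheduler

scheduler = EventScheduler('./fusesell_data')
result = scheduler.schedule_email_event(
    draft_id='draft-001',
    recipient_address='lead@example.com',
    recipient_name='Lead Example',
    org_id='default',
    team_id='team-01',
    customer_timezone='UTC',
    email_type='initial',
    send_immediately=False,
    # Abbreviated stand-in for the reminder_task metadata built by the stages.
    reminder_context={'status': 'published', 'task': 'FuseSell initial outreach default_lead - task-001'},
)
if result['success']:
    print(result['event_id'], result['scheduled_time'])
    # The changelog says reminder IDs are now returned alongside scheduled
    # events; the exact key name is an assumption.
    print(result.get('reminder_id'))
```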
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fusesell
-Version: 1.2.3
+Version: 1.2.5
 Summary: Local implementation of FuseSell AI sales automation pipeline
 Author-email: FuseSell Team <team@fusesell.ai>
 License-Expression: MIT
@@ -45,7 +45,9 @@ Dynamic: license-file
 
 FuseSell Local is a production-ready implementation of the FuseSell AI sales automation system, converted from server-based YAML workflows to a comprehensive Python command-line tool with full data ownership and privacy control.
 
-
+Latest release: `fusesell==1.2.1` is available on PyPI via `pip install fusesell`.
+
+Contributors should review the [Repository Guidelines](AGENTS.md) before opening a pull request.
 
 ## 🚀 Complete Pipeline Overview
 
README.md
@@ -2,9 +2,11 @@
 
 **Complete AI-powered sales automation pipeline that runs entirely on your local machine.**
 
-FuseSell Local is a production-ready implementation of the FuseSell AI sales automation system, converted from server-based YAML workflows to a comprehensive Python command-line tool with full data ownership and privacy control.
-
-
+FuseSell Local is a production-ready implementation of the FuseSell AI sales automation system, converted from server-based YAML workflows to a comprehensive Python command-line tool with full data ownership and privacy control.
+
+Latest release: `fusesell==1.2.1` is available on PyPI via `pip install fusesell`.
+
+Contributors should review the [Repository Guidelines](AGENTS.md) before opening a pull request.
 
 ## 🚀 Complete Pipeline Overview
 
fusesell.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fusesell
-Version: 1.2.3
+Version: 1.2.5
 Summary: Local implementation of FuseSell AI sales automation pipeline
 Author-email: FuseSell Team <team@fusesell.ai>
 License-Expression: MIT
@@ -45,7 +45,9 @@ Dynamic: license-file
 
 FuseSell Local is a production-ready implementation of the FuseSell AI sales automation system, converted from server-based YAML workflows to a comprehensive Python command-line tool with full data ownership and privacy control.
 
-
+Latest release: `fusesell==1.2.1` is available on PyPI via `pip install fusesell`.
+
+Contributors should review the [Repository Guidelines](AGENTS.md) before opening a pull request.
 
 ## 🚀 Complete Pipeline Overview
 
fusesell_local/pipeline.py
@@ -251,11 +251,17 @@ class FuseSellPipeline:
 
         self.logger.info("-" * 40)
         self.logger.info("TIMING VALIDATION:")
-        if discrepancy_percentage < 5.0:
-            self.logger.info(
-
-
-
+        if discrepancy_percentage < 5.0:
+            self.logger.info(
+                f"[OK] Timing validation PASSED (discrepancy: {discrepancy_percentage:.1f}%)"
+            )
+        else:
+            self.logger.warning(
+                f"[WARN] Timing validation WARNING (discrepancy: {discrepancy_percentage:.1f}%)"
+            )
+            self.logger.warning(
+                f" Expected ~{total_stage_time:.2f}s, got {total_duration:.2f}s"
+            )
 
         self.logger.info("=" * 60)
 
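The hunk above only reworks the timing-validation log messages. As a worked illustration, here is a hedged sketch of the check, assuming `discrepancy_percentage` is the percentage gap between the wall-clock pipeline duration and the summed stage times; that formula is not part of this diff, only the 5% threshold and the `[OK]`/`[WARN]` messages are.

```python
# Hedged sketch of the timing validation shown above; the formula for
# discrepancy_percentage is an assumption.
total_stage_time = 12.40   # sum of per-stage durations (seconds)
total_duration = 12.75     # wall-clock pipeline duration (seconds)

discrepancy_percentage = abs(total_duration - total_stage_time) / total_stage_time * 100

if discrepancy_percentage < 5.0:
    print(f"[OK] Timing validation PASSED (discrepancy: {discrepancy_percentage:.1f}%)")
else:
    print(f"[WARN] Timing validation WARNING (discrepancy: {discrepancy_percentage:.1f}%)")
    print(f" Expected ~{total_stage_time:.2f}s, got {total_duration:.2f}s")
```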
fusesell_local/stages/follow_up.py
@@ -877,22 +877,29 @@ Generate only the email content, no additional commentary:"""
             input_data = context.get('input_data', {})
 
             # Initialize event scheduler
-            scheduler = EventScheduler(self.config.get('data_dir', './fusesell_data'))
-
-            # Check if immediate sending is requested
-            send_immediately = input_data.get('send_immediately', False)
-
-
-
-
-
-
-
-
-
-
-
+            scheduler = EventScheduler(self.config.get('data_dir', './fusesell_data'))
+
+            # Check if immediate sending is requested
+            send_immediately = input_data.get('send_immediately', False)
+            reminder_context = self._build_follow_up_reminder_context(
+                draft,
+                recipient_address,
+                recipient_name,
+                context
+            )
+
+            # Schedule the follow-up email event
+            schedule_result = scheduler.schedule_email_event(
+                draft_id=draft.get('draft_id'),
+                recipient_address=recipient_address,
+                recipient_name=recipient_name,
+                org_id=input_data.get('org_id', 'default'),
+                team_id=input_data.get('team_id'),
+                customer_timezone=input_data.get('customer_timezone'),
+                email_type='follow_up',
+                send_immediately=send_immediately,
+                reminder_context=reminder_context
+            )
 
             if schedule_result['success']:
                 self.logger.info(f"Follow-up email event scheduled successfully: {schedule_result['event_id']} for {schedule_result['scheduled_time']}")
@@ -912,12 +919,81 @@ Generate only the email content, no additional commentary:"""
             }
 
         except Exception as e:
-            self.logger.error(f"Follow-up email scheduling failed: {str(e)}")
-            return {
-                'success': False,
-                'message': f'Follow-up email scheduling failed: {str(e)}',
-                'error': str(e)
-            }
+            self.logger.error(f"Follow-up email scheduling failed: {str(e)}")
+            return {
+                'success': False,
+                'message': f'Follow-up email scheduling failed: {str(e)}',
+                'error': str(e)
+            }
+
+    def _build_follow_up_reminder_context(
+        self,
+        draft: Dict[str, Any],
+        recipient_address: str,
+        recipient_name: str,
+        context: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """
+        Build reminder_task metadata for scheduled follow-up emails.
+        """
+        input_data = context.get('input_data', {})
+        org_id = input_data.get('org_id', 'default') or 'default'
+        customer_id = input_data.get('customer_id') or context.get('execution_id') or 'unknown'
+        task_id = context.get('execution_id') or input_data.get('task_id') or 'unknown_task'
+        team_id = input_data.get('team_id')
+        team_name = input_data.get('team_name')
+        language = input_data.get('language')
+        customer_name = input_data.get('customer_name')
+        staff_name = input_data.get('staff_name')
+        interaction_type = input_data.get('interaction_type', 'follow_up')
+        follow_up_iteration = input_data.get('current_follow_up_time') or 1
+        reminder_room = self.config.get('reminder_room_id') or input_data.get('reminder_room_id')
+        draft_id = draft.get('draft_id') or 'unknown_draft'
+        product_name = draft.get('product_name') or input_data.get('product_name')
+
+        customextra = {
+            'reminder_content': 'follow_up',
+            'org_id': org_id,
+            'customer_id': customer_id,
+            'task_id': task_id,
+            'customer_name': customer_name,
+            'language': language,
+            'recipient_address': recipient_address,
+            'recipient_name': recipient_name,
+            'staff_name': staff_name,
+            'team_id': team_id,
+            'team_name': team_name,
+            'interaction_type': interaction_type,
+            'action_status': 'scheduled',
+            'current_follow_up_time': follow_up_iteration,
+            'draft_id': draft_id,
+            'import_uuid': f"{org_id}_{customer_id}_{task_id}_{draft_id}"
+        }
+
+        if product_name:
+            customextra['product_name'] = product_name
+        if draft.get('approach'):
+            customextra['approach'] = draft.get('approach')
+        if draft.get('mail_tone'):
+            customextra['mail_tone'] = draft.get('mail_tone')
+        if draft.get('message_type'):
+            customextra['message_type'] = draft.get('message_type')
+
+        return {
+            'status': 'published',
+            'task': f"FuseSell follow-up {org_id}_{customer_id} - {task_id}",
+            'tags': ['fusesell', 'follow-up'],
+            'room_id': reminder_room,
+            'org_id': org_id,
+            'customer_id': customer_id,
+            'task_id': task_id,
+            'team_id': team_id,
+            'team_name': team_name,
+            'language': language,
+            'customer_name': customer_name,
+            'staff_name': staff_name,
+            'customextra': customextra
+        }
 
     # Data access methods (similar to initial outreach)
     def _get_customer_data(self, context: Dict[str, Any]) -> Dict[str, Any]:
         """Get customer data from previous stages or input."""
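For reference, a standalone sketch of the reminder payload that `_build_follow_up_reminder_context` assembles in the hunk above, using hypothetical values and an abridged `customextra`; the field names and the `{org_id}_{customer_id}_{task_id}_{draft_id}` pattern for `import_uuid` come from the hunk, everything else is illustrative.

```python
# Illustration only: the shape of a follow-up reminder_task payload.
org_id, customer_id, task_id, draft_id = 'acme', 'cust-42', 'exec-001', 'draft-7'

reminder_payload = {
    'status': 'published',
    'task': f"FuseSell follow-up {org_id}_{customer_id} - {task_id}",
    'tags': ['fusesell', 'follow-up'],
    'room_id': None,                 # reminder_room_id from config/input, if configured
    'org_id': org_id,
    'customer_id': customer_id,
    'task_id': task_id,
    'team_id': 'team-01',
    'team_name': 'EMEA Sales',
    'language': 'en',
    'customer_name': 'Jane Doe',
    'staff_name': 'Alex Rep',
    'customextra': {                 # abridged; the helper also repeats org/customer/task IDs here
        'reminder_content': 'follow_up',
        'interaction_type': 'follow_up',
        'action_status': 'scheduled',
        'current_follow_up_time': 1,
        'recipient_address': 'jane@example.com',
        'recipient_name': 'Jane Doe',
        'draft_id': draft_id,
        'import_uuid': f"{org_id}_{customer_id}_{task_id}_{draft_id}",
    },
}
print(reminder_payload['customextra']['import_uuid'])  # acme_cust-42_exec-001_draft-7
```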
fusesell_local/stages/initial_outreach.py
@@ -250,23 +250,30 @@ class InitialOutreachStage(BaseStage):
 
             input_data = context.get('input_data', {})
 
-            # Initialize event scheduler
-            scheduler = EventScheduler(self.config.get('data_dir', './fusesell_data'))
-
-            # Check if immediate sending is requested
-            send_immediately = input_data.get('send_immediately', False)
-
-
-
-
-
-
-
-
-
-
-
-
+            # Initialize event scheduler
+            scheduler = EventScheduler(self.config.get('data_dir', './fusesell_data'))
+
+            # Check if immediate sending is requested
+            send_immediately = input_data.get('send_immediately', False)
+            reminder_context = self._build_initial_reminder_context(
+                draft,
+                recipient_address,
+                recipient_name,
+                context
+            )
+
+            # Schedule the email event
+            schedule_result = scheduler.schedule_email_event(
+                draft_id=draft.get('draft_id'),
+                recipient_address=recipient_address,
+                recipient_name=recipient_name,
+                org_id=input_data.get('org_id', 'default'),
+                team_id=input_data.get('team_id'),
+                customer_timezone=input_data.get('customer_timezone'),
+                email_type='initial',
+                send_immediately=send_immediately,
+                reminder_context=reminder_context
+            )
 
             if schedule_result['success']:
                 self.logger.info(f"Email event scheduled successfully: {schedule_result['event_id']} for {schedule_result['scheduled_time']}")
@@ -287,18 +294,80 @@ class InitialOutreachStage(BaseStage):
             }
 
         except Exception as e:
-            self.logger.error(f"Email scheduling failed: {str(e)}")
-            return {
-                'success': False,
-                'message': f'Email scheduling failed: {str(e)}',
-                'error': str(e)
-            }
-
-    def _handle_close(self, context: Dict[str, Any]) -> Dict[str, Any]:
-        """
-        Handle close action - Close outreach when customer feels negative.
-
-        Args:
+            self.logger.error(f"Email scheduling failed: {str(e)}")
+            return {
+                'success': False,
+                'message': f'Email scheduling failed: {str(e)}',
+                'error': str(e)
+            }
+
+    def _build_initial_reminder_context(
+        self,
+        draft: Dict[str, Any],
+        recipient_address: str,
+        recipient_name: str,
+        context: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """
+        Build reminder_task metadata for scheduled initial outreach emails.
+        """
+        input_data = context.get('input_data', {})
+        org_id = input_data.get('org_id', 'default') or 'default'
+        customer_id = input_data.get('customer_id') or context.get('execution_id') or 'unknown'
+        task_id = context.get('execution_id') or input_data.get('task_id') or 'unknown_task'
+        team_id = input_data.get('team_id')
+        team_name = input_data.get('team_name')
+        language = input_data.get('language')
+        customer_name = input_data.get('customer_name')
+        staff_name = input_data.get('staff_name')
+        reminder_room = self.config.get('reminder_room_id') or input_data.get('reminder_room_id')
+        draft_id = draft.get('draft_id') or 'unknown_draft'
+
+        customextra = {
+            'reminder_content': 'draft_send',
+            'org_id': org_id,
+            'customer_id': customer_id,
+            'task_id': task_id,
+            'customer_name': customer_name,
+            'language': language,
+            'recipient_address': recipient_address,
+            'recipient_name': recipient_name,
+            'staff_name': staff_name,
+            'team_id': team_id,
+            'team_name': team_name,
+            'interaction_type': input_data.get('interaction_type'),
+            'draft_id': draft_id,
+            'import_uuid': f"{org_id}_{customer_id}_{task_id}_{draft_id}"
+        }
+
+        if draft.get('product_name'):
+            customextra['product_name'] = draft.get('product_name')
+        if draft.get('approach'):
+            customextra['approach'] = draft.get('approach')
+        if draft.get('mail_tone'):
+            customextra['mail_tone'] = draft.get('mail_tone')
+
+        return {
+            'status': 'published',
+            'task': f"FuseSell initial outreach {org_id}_{customer_id} - {task_id}",
+            'tags': ['fusesell', 'init-outreach'],
+            'room_id': reminder_room,
+            'org_id': org_id,
+            'customer_id': customer_id,
+            'task_id': task_id,
+            'team_id': team_id,
+            'team_name': team_name,
+            'language': language,
+            'customer_name': customer_name,
+            'staff_name': staff_name,
+            'customextra': customextra
+        }
+
+    def _handle_close(self, context: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Handle close action - Close outreach when customer feels negative.
+
+        Args:
 
             context: Execution context
 
         Returns:
fusesell_local/utils/data_manager.py
@@ -4,10 +4,10 @@ Handles SQLite database operations and local file management
 """
 
 import sqlite3
-import json
-import os
-import uuid
-from typing import Dict, Any, List, Optional, Union
+import json
+import os
+import uuid
+from typing import Dict, Any, List, Optional, Sequence, Union
 from datetime import datetime
 import logging
 from pathlib import Path
@@ -454,11 +454,11 @@ class LocalDataManager:
                 submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                 retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
             )
-        """)
-
-        # Create scheduler_rules table (equivalent to gs_scheduler)
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS scheduler_rules (
+        """)
+
+        # Create scheduler_rules table (equivalent to gs_scheduler)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS scheduler_rules (
                 id TEXT PRIMARY KEY,
                 org_id TEXT NOT NULL,
                 org_name TEXT,
@@ -478,14 +478,36 @@ class LocalDataManager:
                 username TEXT,
                 fullname TEXT,
                 instance_id TEXT,
-                submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-
-        # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS extracted_files (
-                id TEXT PRIMARY KEY,
+                submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """)
+
+        # Create reminder_task table (equivalent to Directus reminder_task)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS reminder_task (
+                id TEXT PRIMARY KEY,
+                status TEXT NOT NULL,
+                task TEXT NOT NULL,
+                cron TEXT NOT NULL,
+                room_id TEXT,
+                tags TEXT,
+                customextra TEXT,
+                org_id TEXT,
+                customer_id TEXT,
+                task_id TEXT,
+                import_uuid TEXT,
+                scheduled_time TIMESTAMP,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                executed_at TIMESTAMP,
+                error_message TEXT
+            )
+        """)
+
+        # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS extracted_files (
+                id TEXT PRIMARY KEY,
                 org_id TEXT NOT NULL,
                 plan_id TEXT,
                 team_id TEXT,
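The `reminder_task` schema added above maps directly onto the standard `sqlite3` module. Below is a hedged sketch of inserting and reading back a row against that schema; the package itself goes through `LocalDataManager` rather than raw SQL, and the cron string format plus the JSON serialization of `tags`/`customextra` are assumptions.

```python
# Hedged sketch: exercising the reminder_task schema from the hunk above.
import json
import sqlite3
import uuid

conn = sqlite3.connect(':memory:')
conn.execute("""
    CREATE TABLE IF NOT EXISTS reminder_task (
        id TEXT PRIMARY KEY,
        status TEXT NOT NULL,
        task TEXT NOT NULL,
        cron TEXT NOT NULL,
        room_id TEXT,
        tags TEXT,
        customextra TEXT,
        org_id TEXT,
        customer_id TEXT,
        task_id TEXT,
        import_uuid TEXT,
        scheduled_time TIMESTAMP,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        executed_at TIMESTAMP,
        error_message TEXT
    )
""")
conn.execute(
    "INSERT INTO reminder_task (id, status, task, cron, tags, customextra, "
    "org_id, customer_id, task_id, import_uuid, scheduled_time) "
    "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
    (
        str(uuid.uuid4()),
        'published',
        'FuseSell follow-up acme_cust-42 - exec-001',
        '0 9 25 10 *',                                  # assumed one-shot cron string
        json.dumps(['fusesell', 'follow-up']),          # TEXT columns stored as JSON here (assumption)
        json.dumps({'reminder_content': 'follow_up'}),
        'acme', 'cust-42', 'exec-001',
        'acme_cust-42_exec-001_draft-7',
        '2025-10-25 09:00:00',
    ),
)
for row in conn.execute("SELECT status, task, scheduled_time FROM reminder_task WHERE org_id = ?", ('acme',)):
    print(row)
conn.close()
```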
@@ -585,17 +607,25 @@ class LocalDataManager:
             "CREATE INDEX IF NOT EXISTS idx_team_settings_team_id ON team_settings(team_id)")
         cursor.execute(
             "CREATE INDEX IF NOT EXISTS idx_products_org_id ON products(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
-        cursor.execute(
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_status ON reminder_task(status)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_org_id ON reminder_task(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_task_id ON reminder_task(task_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_cron ON reminder_task(cron)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
+        cursor.execute(
             "CREATE INDEX IF NOT EXISTS idx_gs_company_criteria_org_id ON gs_company_criteria(org_id)")
 
         # Create compatibility views for backward compatibility
@@ -1408,6 +1438,162 @@ class LocalDataManager:
             self.logger.error(f"Failed to get team settings: {str(e)}")
             raise
 
+    def build_team_settings_snapshot(
+        self,
+        team_id: str,
+        sections: Optional[Sequence[str]] = None
+    ) -> Dict[str, Any]:
+        """
+        Build a response payload containing team settings in the expected RealTimeX format.
+
+        Args:
+            team_id: Team identifier
+            sections: Optional sequence of section names to include. Accepts either
+                full keys (e.g. ``gs_team_product``) or shorthand without the prefix.
+
+        Returns:
+            Dictionary shaped as ``{"data": [{...}]}``. When no settings exist,
+            returns ``{"data": []}``.
+        """
+        settings = self.get_team_settings(team_id)
+        if not settings:
+            return {"data": []}
+
+        available_fields = [
+            'gs_team_organization',
+            'gs_team_rep',
+            'gs_team_product',
+            'gs_team_schedule_time',
+            'gs_team_initial_outreach',
+            'gs_team_follow_up',
+            'gs_team_auto_interaction',
+            'gs_team_followup_schedule_time',
+            'gs_team_birthday_email',
+        ]
+
+        if sections:
+            normalized = set()
+            for item in sections:
+                if not item:
+                    continue
+                item = item.strip()
+                if not item:
+                    continue
+                if item.startswith("gs_team_"):
+                    normalized.add(item)
+                else:
+                    normalized.add(f"gs_team_{item}")
+            fields_to_include = [field for field in available_fields if field in normalized]
+        else:
+            fields_to_include = available_fields
+
+        list_like_fields = {
+            'gs_team_organization',
+            'gs_team_rep',
+            'gs_team_product',
+            'gs_team_auto_interaction',
+        }
+        list_field_defaults = {
+            'gs_team_organization': {
+                'org_name': None,
+                'address': None,
+                'website': None,
+                'industry': None,
+                'description': None,
+                'logo': None,
+                'primary_email': None,
+                'primary_phone': None,
+                'primary_color': None,
+                'is_active': False,
+                'avg_rating': None,
+                'total_sales': None,
+                'total_products': None,
+                'date_joined': None,
+                'last_active': None,
+                'social_media_links': [],
+            },
+            'gs_team_rep': {
+                'name': None,
+                'email': None,
+                'phone': None,
+                'position': None,
+                'website': None,
+                'logo': None,
+                'username': None,
+                'is_primary': False,
+                'primary_color': None,
+                'primary_phone': None,
+            },
+            'gs_team_product': {
+                'product_id': None,
+                'product_name': None,
+                'image_url': None,
+                'enabled': True,
+                'priority': None,
+            },
+            'gs_team_auto_interaction': {
+                'from_email': '',
+                'from_name': '',
+                'from_number': '',
+                'tool_type': 'Email',
+                'email_cc': '',
+                'email_bcc': '',
+            },
+        }
+        alias_fields = {
+            'gs_team_organization': {
+                'name': 'org_name',
+                'brand_palette': 'primary_color',
+            },
+        }
+
+        snapshot: Dict[str, Any] = {}
+        for field in fields_to_include:
+            value = settings.get(field)
+            if value is None:
+                continue
+
+            if field in list_like_fields:
+                if isinstance(value, list):
+                    normalized_items = []
+                    defaults = list_field_defaults.get(field, {})
+                    aliases = alias_fields.get(field, {})
+                    for item in value:
+                        if not isinstance(item, dict):
+                            continue
+                        normalized = {}
+                        for key, default_val in defaults.items():
+                            if key == 'social_media_links':
+                                current = item.get(key)
+                                normalized[key] = current if isinstance(current, list) else []
+                            else:
+                                normalized[key] = item.get(key, default_val)
+                        for legacy_key, target_key in aliases.items():
+                            if normalized.get(target_key) in (None, '', []):
+                                if legacy_key in item:
+                                    normalized[target_key] = item[legacy_key]
+                        # include any additional keys that might exist
+                        normalized_items.append(normalized)
+                    snapshot[field] = normalized_items
+                elif value:
+                    defaults = list_field_defaults.get(field, {})
+                    aliases = alias_fields.get(field, {})
+                    normalized = {key: value.get(key, default_val) for key, default_val in defaults.items()}
+                    for legacy_key, target_key in aliases.items():
+                        if normalized.get(target_key) in (None, '', []):
+                            if legacy_key in value:
+                                normalized[target_key] = value[legacy_key]
+                    snapshot[field] = [normalized]
+                else:
+                    snapshot[field] = []
+            else:
+                snapshot[field] = value
+
+        if not snapshot:
+            return {"data": []}
+
+        return {"data": [snapshot]}
+
     def _deserialize_product_row(self, row: sqlite3.Row) -> Dict[str, Any]:
         """
         Convert a product row into a dictionary with JSON fields parsed.
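A short usage sketch for the new `build_team_settings_snapshot` helper above: how `LocalDataManager` is constructed is an assumption (a data directory is passed elsewhere in the package), while the method signature, the section shorthand handling, and the `{"data": [...]}` return shape come from the hunk.

```python
# Usage sketch (constructor argument is assumed).
from fusesell_local.utils.data_manager import LocalDataManager

dm = LocalDataManager('./fusesell_data')  # assumed constructor argument

# Full snapshot for a team; returns {"data": []} when nothing is stored.
snapshot = dm.build_team_settings_snapshot('team-01')

# Shorthand section names are normalized to their gs_team_* keys.
products_only = dm.build_team_settings_snapshot('team-01', sections=['product', 'auto_interaction'])

for entry in products_only['data']:
    print(entry.get('gs_team_product', []))
```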