fusesell-1.2.4-py3-none-any.whl → fusesell-1.2.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fusesell might be problematic.

@@ -3,14 +3,15 @@ Local Data Manager for FuseSell Local Implementation
 Handles SQLite database operations and local file management
 """
 
-import sqlite3
+import sqlite3
 import json
 import os
 import uuid
+import shutil
 from typing import Dict, Any, List, Optional, Sequence, Union
-from datetime import datetime
-import logging
-from pathlib import Path
+from datetime import datetime
+import logging
+from pathlib import Path
 
 
 class LocalDataManager:
@@ -69,10 +70,72 @@ class LocalDataManager:
         # Initialize database with optimization check
         self._init_database_optimized()
 
-    def _create_directories(self) -> None:
-        """Create necessary directories for data storage."""
-        for directory in [self.data_dir, self.config_dir, self.drafts_dir, self.logs_dir]:
-            directory.mkdir(parents=True, exist_ok=True)
+    def _create_directories(self) -> None:
+        """Create necessary directories for data storage."""
+        for directory in [self.data_dir, self.config_dir, self.drafts_dir, self.logs_dir]:
+            directory.mkdir(parents=True, exist_ok=True)
+        self._ensure_default_config_files()
+
+    def _ensure_default_config_files(self) -> None:
+        """
+        Copy bundled configuration defaults into the writable data directory when missing.
+
+        Ensures first-run executions always have the same baseline prompts, scoring criteria,
+        and email templates as the packaged FuseSell server flows.
+        """
+        try:
+            package_config_dir = Path(__file__).resolve().parents[2] / "fusesell_data" / "config"
+        except Exception as exc:
+            self.logger.debug(f"Unable to resolve packaged config directory: {exc}")
+            return
+
+        if not package_config_dir.exists():
+            self.logger.debug("Packaged config directory not found; skipping default config seeding")
+            return
+
+        default_files = [
+            "prompts.json",
+            "scoring_criteria.json",
+            "email_templates.json",
+        ]
+
+        for filename in default_files:
+            target = self.config_dir / filename
+            if target.exists():
+                continue
+
+            source = package_config_dir / filename
+            if not source.exists():
+                self.logger.debug(f"Packaged default {filename} not found; skipping seed")
+                continue
+
+            try:
+                shutil.copyfile(source, target)
+                self.logger.info(f"Seeded default configuration file: {filename}")
+            except Exception as exc:
+                self.logger.warning(f"Failed to seed default configuration {filename}: {exc}")
+
+    def _load_packaged_config_file(self, filename: str) -> Dict[str, Any]:
+        """
+        Load a configuration JSON file bundled with the package as a fallback.
+
+        Args:
+            filename: Name of the configuration file to load.
+
+        Returns:
+            Parsed configuration dictionary or empty dict on failure.
+        """
+        try:
+            package_config_dir = Path(__file__).resolve().parents[2] / "fusesell_data" / "config"
+            path = package_config_dir / filename
+            if not path.exists():
+                return {}
+
+            with path.open("r", encoding="utf-8") as handle:
+                return json.load(handle)
+        except Exception as exc:
+            self.logger.debug(f"Failed to load packaged config {filename}: {exc}")
+            return {}
 
     def _init_database_optimized(self) -> None:
         """
@@ -454,11 +517,11 @@ class LocalDataManager:
                 submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                 retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
             )
-        """)
-
-        # Create scheduler_rules table (equivalent to gs_scheduler)
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS scheduler_rules (
+        """)
+
+        # Create scheduler_rules table (equivalent to gs_scheduler)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS scheduler_rules (
                 id TEXT PRIMARY KEY,
                 org_id TEXT NOT NULL,
                 org_name TEXT,
@@ -478,14 +541,36 @@ class LocalDataManager:
                 username TEXT,
                 fullname TEXT,
                 instance_id TEXT,
-                submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-            )
-        """)
-
-        # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
-        cursor.execute("""
-            CREATE TABLE IF NOT EXISTS extracted_files (
-                id TEXT PRIMARY KEY,
+                submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """)
+
+        # Create reminder_task table (equivalent to Directus reminder_task)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS reminder_task (
+                id TEXT PRIMARY KEY,
+                status TEXT NOT NULL,
+                task TEXT NOT NULL,
+                cron TEXT NOT NULL,
+                room_id TEXT,
+                tags TEXT,
+                customextra TEXT,
+                org_id TEXT,
+                customer_id TEXT,
+                task_id TEXT,
+                import_uuid TEXT,
+                scheduled_time TIMESTAMP,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                executed_at TIMESTAMP,
+                error_message TEXT
+            )
+        """)
+
+        # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
+        cursor.execute("""
+            CREATE TABLE IF NOT EXISTS extracted_files (
+                id TEXT PRIMARY KEY,
                 org_id TEXT NOT NULL,
                 plan_id TEXT,
                 team_id TEXT,
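
One way to sanity-check the new reminder_task schema is to exercise a trimmed copy of it against an in-memory SQLite database. The column names below come from the CREATE TABLE above; the status, task, cron, and org_id values are invented sample data, not values the package is known to use:

import sqlite3
import uuid

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE IF NOT EXISTS reminder_task (
        id TEXT PRIMARY KEY,
        status TEXT NOT NULL,
        task TEXT NOT NULL,
        cron TEXT NOT NULL,
        org_id TEXT,
        scheduled_time TIMESTAMP,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
""")
# Only id/status/task/cron are NOT NULL in the full schema.
conn.execute(
    "INSERT INTO reminder_task (id, status, task, cron, org_id) VALUES (?, ?, ?, ?, ?)",
    (str(uuid.uuid4()), "pending", "follow_up_email", "0 9 * * 1", "org-demo"),
)
print(conn.execute("SELECT task, cron FROM reminder_task WHERE status = 'pending'").fetchall())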
@@ -585,17 +670,25 @@ class LocalDataManager:
             "CREATE INDEX IF NOT EXISTS idx_team_settings_team_id ON team_settings(team_id)")
         cursor.execute(
             "CREATE INDEX IF NOT EXISTS idx_products_org_id ON products(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
-        cursor.execute(
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_status ON reminder_task(status)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_org_id ON reminder_task(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_task_id ON reminder_task(task_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_reminder_task_cron ON reminder_task(cron)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
+        cursor.execute(
             "CREATE INDEX IF NOT EXISTS idx_gs_company_criteria_org_id ON gs_company_criteria(org_id)")
 
         # Create compatibility views for backward compatibility
@@ -1020,56 +1113,71 @@ class LocalDataManager:
             self.logger.error(f"Failed to get stage results: {str(e)}")
             raise
 
-    def load_prompts(self) -> Dict[str, Any]:
-        """
-        Load prompt templates from configuration.
-
-        Returns:
-            Dictionary of prompt templates
-        """
-        try:
-            prompts_file = self.config_dir / "prompts.json"
-            if prompts_file.exists():
-                with open(prompts_file, 'r', encoding='utf-8') as f:
-                    return json.load(f)
-            return {}
-        except Exception as e:
-            self.logger.error(f"Failed to load prompts: {str(e)}")
-            return {}
-
-    def load_scoring_criteria(self) -> Dict[str, Any]:
+    def load_prompts(self) -> Dict[str, Any]:
+        """
+        Load prompt templates from configuration.
+
+        Returns:
+            Dictionary of prompt templates
+        """
+        try:
+            prompts_file = self.config_dir / "prompts.json"
+            if prompts_file.exists():
+                with open(prompts_file, 'r', encoding='utf-8') as f:
+                    return json.load(f)
+
+            packaged = self._load_packaged_config_file("prompts.json")
+            if packaged:
+                return packaged
+
+            return {}
+        except Exception as e:
+            self.logger.error(f"Failed to load prompts: {str(e)}")
+            return {}
+
+    def load_scoring_criteria(self) -> Dict[str, Any]:
         """
         Load scoring criteria configuration.
 
-        Returns:
-            Dictionary of scoring criteria
-        """
-        try:
-            criteria_file = self.config_dir / "scoring_criteria.json"
-            if criteria_file.exists():
-                with open(criteria_file, 'r', encoding='utf-8') as f:
-                    return json.load(f)
-            return {}
-        except Exception as e:
-            self.logger.error(f"Failed to load scoring criteria: {str(e)}")
-            return {}
-
-    def load_email_templates(self) -> Dict[str, Any]:
+        Returns:
+            Dictionary of scoring criteria
+        """
+        try:
+            criteria_file = self.config_dir / "scoring_criteria.json"
+            if criteria_file.exists():
+                with open(criteria_file, 'r', encoding='utf-8') as f:
+                    return json.load(f)
+
+            packaged = self._load_packaged_config_file("scoring_criteria.json")
+            if packaged:
+                return packaged
+
+            return {}
+        except Exception as e:
+            self.logger.error(f"Failed to load scoring criteria: {str(e)}")
+            return {}
+
+    def load_email_templates(self) -> Dict[str, Any]:
         """
         Load email templates configuration.
 
-        Returns:
-            Dictionary of email templates
-        """
-        try:
-            templates_file = self.config_dir / "email_templates.json"
-            if templates_file.exists():
-                with open(templates_file, 'r', encoding='utf-8') as f:
-                    return json.load(f)
-            return {}
-        except Exception as e:
-            self.logger.error(f"Failed to load email templates: {str(e)}")
-            return {}
+        Returns:
+            Dictionary of email templates
+        """
+        try:
+            templates_file = self.config_dir / "email_templates.json"
+            if templates_file.exists():
+                with open(templates_file, 'r', encoding='utf-8') as f:
+                    return json.load(f)
+
+            packaged = self._load_packaged_config_file("email_templates.json")
+            if packaged:
+                return packaged
+
+            return {}
+        except Exception as e:
+            self.logger.error(f"Failed to load email templates: {str(e)}")
+            return {}
 
     def _generate_customer_id(self) -> str:
         """Generate unique customer ID."""
@@ -1463,6 +1571,59 @@ class LocalDataManager:
             'gs_team_product',
             'gs_team_auto_interaction',
         }
+        list_field_defaults = {
+            'gs_team_organization': {
+                'org_name': None,
+                'address': None,
+                'website': None,
+                'industry': None,
+                'description': None,
+                'logo': None,
+                'primary_email': None,
+                'primary_phone': None,
+                'primary_color': None,
+                'is_active': False,
+                'avg_rating': None,
+                'total_sales': None,
+                'total_products': None,
+                'date_joined': None,
+                'last_active': None,
+                'social_media_links': [],
+            },
+            'gs_team_rep': {
+                'name': None,
+                'email': None,
+                'phone': None,
+                'position': None,
+                'website': None,
+                'logo': None,
+                'username': None,
+                'is_primary': False,
+                'primary_color': None,
+                'primary_phone': None,
+            },
+            'gs_team_product': {
+                'product_id': None,
+                'product_name': None,
+                'image_url': None,
+                'enabled': True,
+                'priority': None,
+            },
+            'gs_team_auto_interaction': {
+                'from_email': '',
+                'from_name': '',
+                'from_number': '',
+                'tool_type': 'Email',
+                'email_cc': '',
+                'email_bcc': '',
+            },
+        }
+        alias_fields = {
+            'gs_team_organization': {
+                'name': 'org_name',
+                'brand_palette': 'primary_color',
+            },
+        }
 
         snapshot: Dict[str, Any] = {}
         for field in fields_to_include:
  for field in fields_to_include:
@@ -1472,9 +1633,35 @@ class LocalDataManager:
1472
1633
 
1473
1634
  if field in list_like_fields:
1474
1635
  if isinstance(value, list):
1475
- snapshot[field] = value
1636
+ normalized_items = []
1637
+ defaults = list_field_defaults.get(field, {})
1638
+ aliases = alias_fields.get(field, {})
1639
+ for item in value:
1640
+ if not isinstance(item, dict):
1641
+ continue
1642
+ normalized = {}
1643
+ for key, default_val in defaults.items():
1644
+ if key == 'social_media_links':
1645
+ current = item.get(key)
1646
+ normalized[key] = current if isinstance(current, list) else []
1647
+ else:
1648
+ normalized[key] = item.get(key, default_val)
1649
+ for legacy_key, target_key in aliases.items():
1650
+ if normalized.get(target_key) in (None, '', []):
1651
+ if legacy_key in item:
1652
+ normalized[target_key] = item[legacy_key]
1653
+ # include any additional keys that might exist
1654
+ normalized_items.append(normalized)
1655
+ snapshot[field] = normalized_items
1476
1656
  elif value:
1477
- snapshot[field] = [value]
1657
+ defaults = list_field_defaults.get(field, {})
1658
+ aliases = alias_fields.get(field, {})
1659
+ normalized = {key: value.get(key, default_val) for key, default_val in defaults.items()}
1660
+ for legacy_key, target_key in aliases.items():
1661
+ if normalized.get(target_key) in (None, '', []):
1662
+ if legacy_key in value:
1663
+ normalized[target_key] = value[legacy_key]
1664
+ snapshot[field] = [normalized]
1478
1665
  else:
1479
1666
  snapshot[field] = []
1480
1667
  else:
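
To make the alias handling above concrete, here is a standalone rerun of the same alias logic on one legacy gs_team_organization record, using a trimmed set of defaults; the input values are invented for illustration:

defaults = {'org_name': None, 'website': None, 'primary_color': None, 'social_media_links': []}
aliases = {'name': 'org_name', 'brand_palette': 'primary_color'}
item = {'name': 'Acme Corp', 'website': 'https://acme.example'}  # legacy record with 'name' instead of 'org_name'

normalized = {key: item.get(key, default) for key, default in defaults.items()}
for legacy_key, target_key in aliases.items():
    # Fill the canonical key from the legacy key only when it is still empty.
    if normalized.get(target_key) in (None, '', []) and legacy_key in item:
        normalized[target_key] = item[legacy_key]

# normalized -> {'org_name': 'Acme Corp', 'website': 'https://acme.example',
#                'primary_color': None, 'social_media_links': []}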