fusesell-1.2.2-py3-none-any.whl → fusesell-1.2.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fusesell might be problematic.
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/METADATA +4 -2
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/RECORD +10 -10
- fusesell_local/__init__.py +1 -1
- fusesell_local/cli.py +39 -12
- fusesell_local/tests/test_data_manager_products.py +67 -1
- fusesell_local/utils/data_manager.py +310 -156
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/WHEEL +0 -0
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/entry_points.txt +0 -0
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/licenses/LICENSE +0 -0
- {fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/top_level.txt +0 -0
{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fusesell
-Version: 1.2.2
+Version: 1.2.4
 Summary: Local implementation of FuseSell AI sales automation pipeline
 Author-email: FuseSell Team <team@fusesell.ai>
 License-Expression: MIT
@@ -45,7 +45,9 @@ Dynamic: license-file
 
 FuseSell Local is a production-ready implementation of the FuseSell AI sales automation system, converted from server-based YAML workflows to a comprehensive Python command-line tool with full data ownership and privacy control.
 
-
+Latest release: `fusesell==1.2.1` is available on PyPI via `pip install fusesell`.
+
+Contributors should review the [Repository Guidelines](AGENTS.md) before opening a pull request.
 
 ## 🚀 Complete Pipeline Overview
 
{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/RECORD CHANGED

@@ -1,8 +1,8 @@
 fusesell.py,sha256=t5PjkhWEJGINp4k517u0EX0ge7lzuHOUHHro-BE1mGk,596
-fusesell-1.2.
-fusesell_local/__init__.py,sha256=
+fusesell-1.2.4.dist-info/licenses/LICENSE,sha256=GDz1ZoC4lB0kwjERpzqc_OdA_awYVso2aBnUH-ErW_w,1070
+fusesell_local/__init__.py,sha256=5kIksTuzPSk57Yq7vqJC3pFHs9ZOyxaGSkA9QxqzEq0,966
 fusesell_local/api.py,sha256=AcPune5YJdgi7nsMeusCUqc49z5UiycsQb6n3yiV_No,10839
-fusesell_local/cli.py,sha256=
+fusesell_local/cli.py,sha256=MYnVxuEf5KTR4VcO3sc-VtP9NkWlSixJsYfOWST2Ds0,65859
 fusesell_local/pipeline.py,sha256=KO5oAIHZ3L_uAZWOszauJyv0QWlsQMIDNGRuwQSxNmQ,39531
 fusesell_local/config/__init__.py,sha256=0ErO7QiSDqKn-LHcjIRdLZzh5QaRTkRsIlwfgpkkDz8,209
 fusesell_local/config/prompts.py,sha256=5O3Y2v3GCi9d9FEyR6Ekc1UXVq2TcZp3Rrspvx4bkac,10164
@@ -17,19 +17,19 @@ fusesell_local/stages/lead_scoring.py,sha256=ir3l849eMGrGLf0OYUcmA1F3FwyYhAplS4n
 fusesell_local/tests/conftest.py,sha256=TWUtlP6cNPVOYkTPz-j9BzS_KnXdPWy8D-ObPLHvXYs,366
 fusesell_local/tests/test_api.py,sha256=763rUVb5pAuAQOovug6Ka0T9eGK8-WVOC_J08M7TETo,1827
 fusesell_local/tests/test_cli.py,sha256=iNgU8nDlVrcQM5MpBUTIJ5q3oh2-jgX77hJeaqBxToM,1007
-fusesell_local/tests/test_data_manager_products.py,sha256=
+fusesell_local/tests/test_data_manager_products.py,sha256=g8EUSxTqdg18VifzhuOtDDywiMYzwOWFADny5Vntc28,4691
 fusesell_local/tests/test_data_manager_sales_process.py,sha256=NbwxQ9oBKCZfrkRQYxzHHQ08F7epqPUsyeGz_vm3kf8,4447
 fusesell_local/tests/test_data_manager_teams.py,sha256=kjk4V4r9ja4EVREIiQMxkuZd470SSwRHJAvpHln9KO4,4578
 fusesell_local/utils/__init__.py,sha256=TVemlo0wpckhNUxP3a1Tky3ekswy8JdIHaXBlkKXKBQ,330
 fusesell_local/utils/birthday_email_manager.py,sha256=NKLoUyzPedyhewZPma21SOoU8p9wPquehloer7TRA9U,20478
-fusesell_local/utils/data_manager.py,sha256=
+fusesell_local/utils/data_manager.py,sha256=MVozSojGXbIr9mhaTIt5bQrFO_1o-4BJux03FPW0jxA,180881
 fusesell_local/utils/event_scheduler.py,sha256=rjtWwtYQoJP0YwoN1-43t6K9GpLfqRq3c7Fv4papvbI,25725
 fusesell_local/utils/llm_client.py,sha256=FVc25UlGt6hro7h5Iw7PHSXY3E3_67Xc-SUbHuMSRs0,10437
 fusesell_local/utils/logger.py,sha256=sWlV8Tjyz_Z8J4zXKOnNalh8_iD6ytfrwPZpD-wcEOs,6259
 fusesell_local/utils/timezone_detector.py,sha256=0cAE4c8ZXqCA8AvxRKm6PrFKmAmsbq3HOHR6w-mW3KQ,39997
 fusesell_local/utils/validators.py,sha256=Z1VzeoxFsnuzlIA_ZaMWoy-0Cgyqupd47kIdljlMDbs,15438
-fusesell-1.2.
-fusesell-1.2.
-fusesell-1.2.
-fusesell-1.2.
-fusesell-1.2.
+fusesell-1.2.4.dist-info/METADATA,sha256=cBzLD9FHG3yQu300mnPm48TKgNH1adgcF66gIF9vqHk,35074
+fusesell-1.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fusesell-1.2.4.dist-info/entry_points.txt,sha256=Vqek7tbiX7iF4rQkCRBZvT5WrB0HUduqKTsI2ZjhsXo,53
+fusesell-1.2.4.dist-info/top_level.txt,sha256=VP9y1K6DEq6gNq2UgLd7ChujxViF6OzeAVCK7IUBXPA,24
+fusesell-1.2.4.dist-info/RECORD,,
fusesell_local/__init__.py CHANGED

fusesell_local/cli.py CHANGED
@@ -410,11 +410,32 @@ Examples:
         update_parser.add_argument(
             '--subcategory', help='New product subcategory')
 
-        # Product list
-        list_parser = product_subparsers.add_parser(
-            'list', help='List products')
-        list_parser.add_argument(
-            '--org-id', required=True, help='Organization ID')
+        # Product list
+        list_parser = product_subparsers.add_parser(
+            'list', help='List products')
+        list_parser.add_argument(
+            '--org-id', required=True, help='Organization ID')
+        list_parser.add_argument(
+            '--status',
+            choices=['active', 'inactive', 'all'],
+            default='active',
+            help='Filter products by status (default: active)',
+        )
+        list_parser.add_argument(
+            '--search-term',
+            help='Keyword to match against product name or descriptions',
+        )
+        list_parser.add_argument(
+            '--limit',
+            type=int,
+            help='Maximum number of products to return',
+        )
+        list_parser.add_argument(
+            '--sort',
+            choices=['name', 'created_at', 'updated_at'],
+            default='name',
+            help='Sort order for results (default: name)',
+        )
 
     def _add_settings_arguments(self, parser: argparse.ArgumentParser) -> None:
         """Add settings management arguments."""
@@ -1029,13 +1050,19 @@ Examples:
                     f"Product not found: {args.product_id}", file=sys.stderr)
                 return 1
 
-        elif action == 'list':
-            products = data_manager.
-
-
-
-
-
+        elif action == 'list':
+            products = data_manager.search_products(
+                org_id=args.org_id,
+                status=getattr(args, 'status', 'active'),
+                search_term=getattr(args, 'search_term', None),
+                limit=getattr(args, 'limit', None),
+                sort=getattr(args, 'sort', 'name'),
+            )
+            if products:
+                print(f"Products for organization {args.org_id}:")
+                for product in products:
+                    print(
+                        f"  {product['product_id']}: {product['product_name']} - {product.get('short_description', 'No description')}")
             else:
                 print(f"No products found for organization {args.org_id}")
             return 0
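The new `--status`, `--search-term`, `--limit`, and `--sort` flags only affect the `product list` subcommand, and the handler above passes them to `search_products` through `getattr` defaults so older argument namespaces keep working. A minimal invocation sketch; the `fusesell` console-script name and the `product` subcommand spelling are assumptions suggested by entry_points.txt and the parser setup above, not confirmed by this diff:

    # Hypothetical CLI call exercising the new listing filters.
    import subprocess

    subprocess.run(
        [
            "fusesell", "product", "list",   # assumed script and subcommand names
            "--org-id", "org-123",           # illustrative organization ID
            "--status", "all",
            "--search-term", "crm",
            "--limit", "5",
            "--sort", "updated_at",
        ],
        check=True,
    )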
fusesell_local/tests/test_data_manager_products.py CHANGED

@@ -71,4 +71,70 @@ def test_get_products_by_org_returns_active_products_only(data_manager):
 
     results = data_manager.get_products_by_org(active["org_id"])
     assert len(results) == 1
-    assert results[0]["product_id"] == active["product_id"]
+    assert results[0]["product_id"] == active["product_id"]
+
+
+def test_search_products_filters_by_keyword(data_manager):
+    alpha = _sample_product_payload(
+        "prod-alpha",
+        productName="Alpha CRM",
+        shortDescription="CRM automation platform",
+        keywords=["CRM", "pipeline"],
+    )
+    beta = _sample_product_payload(
+        "prod-beta",
+        productName="Beta Ops",
+        shortDescription="Operations toolkit",
+        keywords=["ops"],
+    )
+
+    data_manager.save_product(alpha)
+    data_manager.save_product(beta)
+
+    results = data_manager.search_products(
+        org_id="org-123",
+        search_term="crm",
+    )
+
+    assert len(results) == 1
+    assert results[0]["product_id"] == "prod-alpha"
+
+
+def test_search_products_limit_and_sort(data_manager):
+    first = _sample_product_payload("prod-c", productName="Charlie Suite")
+    second = _sample_product_payload("prod-a", productName="Alpha Suite")
+    third = _sample_product_payload("prod-b", productName="Bravo Suite")
+
+    data_manager.save_product(first)
+    data_manager.save_product(second)
+    data_manager.save_product(third)
+
+    # Update timestamps to control order
+    with sqlite3.connect(data_manager.db_path) as conn:
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-01 10:00:00", "prod-a"),
+        )
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-02 10:00:00", "prod-b"),
+        )
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-03 10:00:00", "prod-c"),
+        )
+        conn.commit()
+
+    by_name = data_manager.search_products(
+        org_id="org-123",
+        sort="name",
+        limit=2,
+    )
+    assert [p["product_id"] for p in by_name] == ["prod-a", "prod-b"]
+
+    by_updated = data_manager.search_products(
+        org_id="org-123",
+        sort="updated_at",
+        limit=2,
+    )
+    assert [p["product_id"] for p in by_updated] == ["prod-c", "prod-b"]
fusesell_local/utils/data_manager.py CHANGED

@@ -4,24 +4,49 @@ Handles SQLite database operations and local file management
 """
 
 import sqlite3
-import json
-import os
-import uuid
-from typing import Dict, Any, List, Optional, Union
+import json
+import os
+import uuid
+from typing import Dict, Any, List, Optional, Sequence, Union
 from datetime import datetime
 import logging
 from pathlib import Path
 
 
-class LocalDataManager:
-    """
-    Manages local data storage using SQLite database and JSON files.
-    Provides interface for storing execution results, customer data, and configurations.
-    """
-
-    # Class-level tracking to prevent multiple initializations
-    _initialized_databases = set()
-    _initialization_lock = False
+class LocalDataManager:
+    """
+    Manages local data storage using SQLite database and JSON files.
+    Provides interface for storing execution results, customer data, and configurations.
+    """
+
+    # Class-level tracking to prevent multiple initializations
+    _initialized_databases = set()
+    _initialization_lock = False
+    _product_json_fields = [
+        'target_users',
+        'key_features',
+        'unique_selling_points',
+        'pain_points_solved',
+        'competitive_advantages',
+        'pricing',
+        'pricing_rules',
+        'sales_metrics',
+        'customer_feedback',
+        'keywords',
+        'related_products',
+        'seasonal_demand',
+        'market_insights',
+        'case_studies',
+        'testimonials',
+        'success_metrics',
+        'product_variants',
+        'technical_specifications',
+        'compatibility',
+        'support_info',
+        'regulatory_compliance',
+        'localization',
+        'shipping_info'
+    ]
 
     def __init__(self, data_dir: str = "./fusesell_data"):
         """
@@ -1342,50 +1367,149 @@ class LocalDataManager:
             self.logger.error(f"Failed to save team settings: {str(e)}")
             raise
 
-    def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
-        """
-        Get team settings by team ID.
-
-        Args:
-            team_id: Team identifier
-
-        Returns:
-            Team settings dictionary or None if not found
-        """
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                conn.row_factory = sqlite3.Row
-                cursor = conn.cursor()
-                cursor.execute(
-                    "SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
-                row = cursor.fetchone()
-
-                if row:
-                    result = dict(row)
-                    # Parse JSON fields
-                    json_fields = [
-                        'gs_team_organization', 'gs_team_rep', 'gs_team_product',
-                        'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
-                        'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
-                    ]
-
-                    for field in json_fields:
-                        if result[field]:
-                            try:
-                                result[field] = json.loads(result[field])
-                            except json.JSONDecodeError:
-                                result[field] = None
-
-                    return result
-                return None
-
-        except Exception as e:
-            self.logger.error(f"Failed to get team settings: {str(e)}")
-            raise
-
-    def
-
-
+    def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
+        """
+        Get team settings by team ID.
+
+        Args:
+            team_id: Team identifier
+
+        Returns:
+            Team settings dictionary or None if not found
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute(
+                    "SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
+                row = cursor.fetchone()
+
+                if row:
+                    result = dict(row)
+                    # Parse JSON fields
+                    json_fields = [
+                        'gs_team_organization', 'gs_team_rep', 'gs_team_product',
+                        'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
+                        'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
+                    ]
+
+                    for field in json_fields:
+                        if result[field]:
+                            try:
+                                result[field] = json.loads(result[field])
+                            except json.JSONDecodeError:
+                                result[field] = None
+
+                    return result
+                return None
+
+        except Exception as e:
+            self.logger.error(f"Failed to get team settings: {str(e)}")
+            raise
+
+    def build_team_settings_snapshot(
+        self,
+        team_id: str,
+        sections: Optional[Sequence[str]] = None
+    ) -> Dict[str, Any]:
+        """
+        Build a response payload containing team settings in the expected RealTimeX format.
+
+        Args:
+            team_id: Team identifier
+            sections: Optional sequence of section names to include. Accepts either
+                full keys (e.g. ``gs_team_product``) or shorthand without the prefix.
+
+        Returns:
+            Dictionary shaped as ``{"data": [{...}]}``. When no settings exist,
+            returns ``{"data": []}``.
+        """
+        settings = self.get_team_settings(team_id)
+        if not settings:
+            return {"data": []}
+
+        available_fields = [
+            'gs_team_organization',
+            'gs_team_rep',
+            'gs_team_product',
+            'gs_team_schedule_time',
+            'gs_team_initial_outreach',
+            'gs_team_follow_up',
+            'gs_team_auto_interaction',
+            'gs_team_followup_schedule_time',
+            'gs_team_birthday_email',
+        ]
+
+        if sections:
+            normalized = set()
+            for item in sections:
+                if not item:
+                    continue
+                item = item.strip()
+                if not item:
+                    continue
+                if item.startswith("gs_team_"):
+                    normalized.add(item)
+                else:
+                    normalized.add(f"gs_team_{item}")
+            fields_to_include = [field for field in available_fields if field in normalized]
+        else:
+            fields_to_include = available_fields
+
+        list_like_fields = {
+            'gs_team_organization',
+            'gs_team_rep',
+            'gs_team_product',
+            'gs_team_auto_interaction',
+        }
+
+        snapshot: Dict[str, Any] = {}
+        for field in fields_to_include:
+            value = settings.get(field)
+            if value is None:
+                continue
+
+            if field in list_like_fields:
+                if isinstance(value, list):
+                    snapshot[field] = value
+                elif value:
+                    snapshot[field] = [value]
+                else:
+                    snapshot[field] = []
+            else:
+                snapshot[field] = value
+
+        if not snapshot:
+            return {"data": []}
+
+        return {"data": [snapshot]}
+
+    def _deserialize_product_row(self, row: sqlite3.Row) -> Dict[str, Any]:
+        """
+        Convert a product row into a dictionary with JSON fields parsed.
+
+        Args:
+            row: SQLite row containing product data
+
+        Returns:
+            Dictionary representation of the row with JSON fields decoded
+        """
+        product = dict(row)
+
+        for field in self._product_json_fields:
+            value = product.get(field)
+            if value:
+                try:
+                    product[field] = json.loads(value)
+                except (json.JSONDecodeError, TypeError):
+                    product[field] = None
+
+        return product
+
+    def save_product(self, product_data: Dict[str, Any]) -> str:
+        """
+        Save or update product information.
 
         Args:
             product_data: Product information dictionary
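The new build_team_settings_snapshot helper wraps get_team_settings and returns the RealTimeX-style {"data": [...]} payload directly, normalizing shorthand section names to their gs_team_* keys. A minimal usage sketch, assuming settings were stored earlier via save_team_settings; the team ID and section names below are illustrative:

    from fusesell_local.utils.data_manager import LocalDataManager

    dm = LocalDataManager(data_dir="./fusesell_data")

    # Shorthand names ("product", "rep") are expanded to gs_team_product / gs_team_rep.
    snapshot = dm.build_team_settings_snapshot("team-001", sections=["product", "rep"])

    # Expected shape: {"data": [{"gs_team_product": [...], "gs_team_rep": [...]}]},
    # or {"data": []} when no settings exist for the team.
    print(snapshot)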
@@ -1569,50 +1693,119 @@ class LocalDataManager:
             self.logger.error(f"Failed to save product: {str(e)}")
             raise
 
-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def search_products(
+        self,
+        org_id: str,
+        status: Optional[str] = "active",
+        search_term: Optional[str] = None,
+        limit: Optional[int] = None,
+        sort: Optional[str] = "name"
+    ) -> List[Dict[str, Any]]:
+        """
+        Search products for an organization with optional filters.
+
+        Args:
+            org_id: Organization identifier
+            status: Product status filter ("active", "inactive", or "all")
+            search_term: Keyword to match against name, descriptions, or keywords
+            limit: Maximum number of products to return
+            sort: Sort order ("name", "created_at", "updated_at")
+
+        Returns:
+            List of product dictionaries
+        """
+        try:
+            def _is_placeholder(value: Any) -> bool:
+                return isinstance(value, str) and value.strip().startswith("{{") and value.strip().endswith("}}")
+
+            # Normalize status
+            normalized_status: Optional[str] = status
+            if _is_placeholder(normalized_status):
+                normalized_status = None
+            if isinstance(normalized_status, str):
+                normalized_status = normalized_status.strip().lower()
+            if normalized_status not in {'active', 'inactive', 'all'}:
+                normalized_status = 'active'
+
+            # Normalize sort
+            normalized_sort: Optional[str] = sort
+            if _is_placeholder(normalized_sort):
+                normalized_sort = None
+            if isinstance(normalized_sort, str):
+                normalized_sort = normalized_sort.strip().lower()
+            sort_map = {
+                'name': ("product_name COLLATE NOCASE", "ASC"),
+                'created_at': ("datetime(created_at)", "DESC"),
+                'updated_at': ("datetime(updated_at)", "DESC"),
+            }
+            order_by, direction = sort_map.get(normalized_sort, sort_map['name'])
+
+            # Normalize search term
+            normalized_search: Optional[str] = None
+            if not _is_placeholder(search_term) and search_term is not None:
+                normalized_search = str(search_term).strip()
+                if normalized_search == "":
+                    normalized_search = None
+
+            # Normalize limit
+            normalized_limit: Optional[int] = None
+            if not _is_placeholder(limit) and limit is not None:
+                try:
+                    normalized_limit = int(limit)
+                    if normalized_limit <= 0:
+                        normalized_limit = None
+                except (TypeError, ValueError):
+                    normalized_limit = None
+
+            where_clauses = ["org_id = ?"]
+            params: List[Any] = [org_id]
+
+            if normalized_status != 'all':
+                where_clauses.append("status = ?")
+                params.append(normalized_status)
+
+            query = "SELECT * FROM products WHERE " + " AND ".join(where_clauses)
+
+            if normalized_search:
+                like_value = f"%{normalized_search.lower()}%"
+                query += (
+                    " AND ("
+                    "LOWER(product_name) LIKE ? OR "
+                    "LOWER(COALESCE(short_description, '')) LIKE ? OR "
+                    "LOWER(COALESCE(long_description, '')) LIKE ? OR "
+                    "LOWER(COALESCE(keywords, '')) LIKE ?)"
+                )
+                params.extend([like_value] * 4)
+
+            query += f" ORDER BY {order_by} {direction}"
+
+            if normalized_limit is not None:
+                query += " LIMIT ?"
+                params.append(normalized_limit)
+
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute(query, params)
+                rows = cursor.fetchall()
+
+            return [self._deserialize_product_row(row) for row in rows]
+
+        except Exception as e:
+            self.logger.error(f"Failed to search products: {str(e)}")
+            raise
+
+    def get_products_by_org(self, org_id: str) -> List[Dict[str, Any]]:
+        """
+        Backward-compatible helper that returns active products for an organization.
+
+        Args:
+            org_id: Organization identifier
+
+        Returns:
+            List of active product dictionaries
+        """
+        return self.search_products(org_id=org_id, status="active")
 
     def get_products_by_team(self, team_id: str) -> List[Dict[str, Any]]:
         """
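search_products is now the single query path for product lookups, and get_products_by_org above is reduced to a thin wrapper over it. A minimal usage sketch, assuming products were stored earlier with save_product; the organization ID and search term are illustrative:

    from fusesell_local.utils.data_manager import LocalDataManager

    dm = LocalDataManager(data_dir="./fusesell_data")

    # Active products whose name, descriptions, or keywords mention "crm",
    # most recently updated first, capped at 10 rows.
    matches = dm.search_products(
        org_id="org-123",
        status="active",
        search_term="crm",
        limit=10,
        sort="updated_at",
    )

    for product in matches:
        print(product["product_id"], product["product_name"])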
@@ -1649,34 +1842,13 @@ class LocalDataManager:
                 cursor.execute(
                     f"SELECT * FROM products WHERE product_id IN ({placeholders}) AND status = 'active'", product_ids)
 
-
-
-
-
-
-
-
-                        'customer_feedback', 'keywords', 'related_products', 'seasonal_demand',
-                        'market_insights', 'case_studies', 'testimonials', 'success_metrics',
-                        'product_variants', 'technical_specifications', 'compatibility', 'support_info',
-                        'regulatory_compliance', 'localization', 'shipping_info'
-                    ]
-
-                    for field in json_fields:
-                        if product[field]:
-                            try:
-                                product[field] = json.loads(product[field])
-                            except json.JSONDecodeError:
-                                product[field] = None
-
-                    products.append(product)
-
-                return products
-
-        except Exception as e:
-            self.logger.error(f"Failed to get products by team: {str(e)}")
-            raise
-
+                return [self._deserialize_product_row(row)
+                        for row in cursor.fetchall()]
+
+        except Exception as e:
+            self.logger.error(f"Failed to get products by team: {str(e)}")
+            raise
+
     def get_product(self, product_id: str) -> Optional[Dict[str, Any]]:
         """
         Get product by ID.
@@ -1692,33 +1864,15 @@ class LocalDataManager:
                 conn.row_factory = sqlite3.Row
                 cursor = conn.cursor()
                 cursor.execute("SELECT * FROM products WHERE product_id = ?", (product_id,))
-                row = cursor.fetchone()
-
-                if row:
-
-
-
-
-
-
-                        'market_insights', 'case_studies', 'testimonials', 'success_metrics',
-                        'product_variants', 'technical_specifications', 'compatibility', 'support_info',
-                        'regulatory_compliance', 'localization', 'shipping_info'
-                    ]
-
-                    for field in json_fields:
-                        if product[field]:
-                            try:
-                                product[field] = json.loads(product[field])
-                            except json.JSONDecodeError:
-                                product[field] = None
-
-                    return product
-                return None
-
-        except Exception as e:
-            self.logger.error(f"Error getting product {product_id}: {str(e)}")
-            raise
+                row = cursor.fetchone()
+
+                if row:
+                    return self._deserialize_product_row(row)
+                return None
+
+        except Exception as e:
+            self.logger.error(f"Error getting product {product_id}: {str(e)}")
+            raise
 
     def update_product(self, product_id: str, product_data: Dict[str, Any]) -> bool:
         """
{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/WHEEL: file without changes

{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/entry_points.txt: file without changes

{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/licenses/LICENSE: file without changes

{fusesell-1.2.2.dist-info → fusesell-1.2.4.dist-info}/top_level.txt: file without changes