fusesell 1.2.2__tar.gz → 1.2.3__tar.gz
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of fusesell has been flagged as potentially problematic.
- {fusesell-1.2.2 → fusesell-1.2.3}/CHANGELOG.md +13 -2
- {fusesell-1.2.2/fusesell.egg-info → fusesell-1.2.3}/PKG-INFO +1 -1
- {fusesell-1.2.2 → fusesell-1.2.3/fusesell.egg-info}/PKG-INFO +1 -1
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/__init__.py +1 -1
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/cli.py +39 -12
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_data_manager_products.py +67 -1
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py +229 -152
- {fusesell-1.2.2 → fusesell-1.2.3}/pyproject.toml +1 -1
- {fusesell-1.2.2 → fusesell-1.2.3}/LICENSE +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/MANIFEST.in +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/README.md +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.egg-info/SOURCES.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.egg-info/dependency_links.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.egg-info/entry_points.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.egg-info/requires.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.egg-info/top_level.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/api.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/config/__init__.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/config/prompts.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/config/settings.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/pipeline.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/__init__.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/base_stage.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/data_acquisition.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/data_preparation.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/follow_up.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/initial_outreach.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/stages/lead_scoring.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/conftest.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_api.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_cli.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_data_manager_sales_process.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_data_manager_teams.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/__init__.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/birthday_email_manager.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/event_scheduler.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/llm_client.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/logger.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/timezone_detector.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/validators.py +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/requirements.txt +0 -0
- {fusesell-1.2.2 → fusesell-1.2.3}/setup.cfg +0 -0
{fusesell-1.2.2 → fusesell-1.2.3}/CHANGELOG.md

```diff
@@ -1,7 +1,18 @@
-# Changelog
-
+# Changelog
+
 All notable changes to FuseSell Local will be documented in this file.
 
+# [1.2.3] - 2025-10-21
+
+### Added
+- `LocalDataManager.search_products()` for server-compatible product filtering (status, keyword, limit, sort).
+- CLI `product list` flags and `list_products.py` filters wired to the new search helper.
+- Regression tests covering keyword search, sorting, and limiting behavior.
+
+### Changed
+- `get_products_by_org` now delegates to the filtered search path to avoid loading inactive results.
+- Product management documentation updated for RealTimeX flows and CLI filter usage.
+
 # [1.2.2] - 2025-10-21
 
 ### Added
```
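The entry above names the new filtering surface; as a rough illustration of how those options compose, here is a minimal sketch, assuming `LocalDataManager` is importable from the module path shown in the file list and that some products already exist locally (the org id and filter values are placeholders):

```python
from fusesell_local.utils.data_manager import LocalDataManager  # assumed import path

# LocalDataManager defaults to ./fusesell_data (see __init__ in the hunk below).
dm = LocalDataManager(data_dir="./fusesell_data")

# Combine the filters listed in the 1.2.3 changelog: status, keyword, limit, sort.
matches = dm.search_products(
    org_id="org-123",       # placeholder organization id
    status="all",           # 'active' (default), 'inactive', or 'all'
    search_term="crm",      # matched against name, descriptions, and keywords
    sort="updated_at",      # 'name' (default), 'created_at', or 'updated_at'
    limit=10,
)
for product in matches:
    print(product["product_id"], product["product_name"])
```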
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/cli.py

```diff
@@ -410,11 +410,32 @@ Examples:
         update_parser.add_argument(
             '--subcategory', help='New product subcategory')
 
-        # Product list
-        list_parser = product_subparsers.add_parser(
-            'list', help='List products')
-        list_parser.add_argument(
-            '--org-id', required=True, help='Organization ID')
+        # Product list
+        list_parser = product_subparsers.add_parser(
+            'list', help='List products')
+        list_parser.add_argument(
+            '--org-id', required=True, help='Organization ID')
+        list_parser.add_argument(
+            '--status',
+            choices=['active', 'inactive', 'all'],
+            default='active',
+            help='Filter products by status (default: active)',
+        )
+        list_parser.add_argument(
+            '--search-term',
+            help='Keyword to match against product name or descriptions',
+        )
+        list_parser.add_argument(
+            '--limit',
+            type=int,
+            help='Maximum number of products to return',
+        )
+        list_parser.add_argument(
+            '--sort',
+            choices=['name', 'created_at', 'updated_at'],
+            default='name',
+            help='Sort order for results (default: name)',
+        )
 
     def _add_settings_arguments(self, parser: argparse.ArgumentParser) -> None:
         """Add settings management arguments."""
```
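The surrounding parser class is not part of this hunk, so the standalone argparse sketch below only mirrors the flags added above to show how they parse; the program name and example values are illustrative, not taken from the package:

```python
import argparse

# Standalone mirror of the `product list` options added in 1.2.3.
parser = argparse.ArgumentParser(prog="fusesell-product-list")
parser.add_argument('--org-id', required=True, help='Organization ID')
parser.add_argument('--status', choices=['active', 'inactive', 'all'], default='active')
parser.add_argument('--search-term')
parser.add_argument('--limit', type=int)
parser.add_argument('--sort', choices=['name', 'created_at', 'updated_at'], default='name')

args = parser.parse_args(['--org-id', 'org-123', '--status', 'all', '--limit', '5'])
print(args.org_id, args.status, args.search_term, args.limit, args.sort)
# -> org-123 all None 5 name
```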
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/cli.py

```diff
@@ -1029,13 +1050,19 @@ Examples:
                     f"Product not found: {args.product_id}", file=sys.stderr)
                 return 1
 
-        elif action == 'list':
-            products = data_manager.
-
-
-
-
-
+        elif action == 'list':
+            products = data_manager.search_products(
+                org_id=args.org_id,
+                status=getattr(args, 'status', 'active'),
+                search_term=getattr(args, 'search_term', None),
+                limit=getattr(args, 'limit', None),
+                sort=getattr(args, 'sort', 'name'),
+            )
+            if products:
+                print(f"Products for organization {args.org_id}:")
+                for product in products:
+                    print(
+                        f" {product['product_id']}: {product['product_name']} - {product.get('short_description', 'No description')}")
             else:
                 print(f"No products found for organization {args.org_id}")
         return 0
```
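The handler reads each new option with `getattr(..., default)`, so a namespace that predates the 1.2.3 flags still falls back to the old behavior; a minimal sketch of that fallback:

```python
import argparse

# A namespace that predates the 1.2.3 flags: only org_id is present.
old_style_args = argparse.Namespace(org_id="org-123")

# Same lookups the handler performs; missing attributes fall back to defaults.
status = getattr(old_style_args, 'status', 'active')
search_term = getattr(old_style_args, 'search_term', None)
limit = getattr(old_style_args, 'limit', None)
sort = getattr(old_style_args, 'sort', 'name')
print(status, search_term, limit, sort)  # -> active None None name
```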
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/tests/test_data_manager_products.py

```diff
@@ -71,4 +71,70 @@ def test_get_products_by_org_returns_active_products_only(data_manager):
 
     results = data_manager.get_products_by_org(active["org_id"])
     assert len(results) == 1
-    assert results[0]["product_id"] == active["product_id"]
+    assert results[0]["product_id"] == active["product_id"]
+
+
+def test_search_products_filters_by_keyword(data_manager):
+    alpha = _sample_product_payload(
+        "prod-alpha",
+        productName="Alpha CRM",
+        shortDescription="CRM automation platform",
+        keywords=["CRM", "pipeline"],
+    )
+    beta = _sample_product_payload(
+        "prod-beta",
+        productName="Beta Ops",
+        shortDescription="Operations toolkit",
+        keywords=["ops"],
+    )
+
+    data_manager.save_product(alpha)
+    data_manager.save_product(beta)
+
+    results = data_manager.search_products(
+        org_id="org-123",
+        search_term="crm",
+    )
+
+    assert len(results) == 1
+    assert results[0]["product_id"] == "prod-alpha"
+
+
+def test_search_products_limit_and_sort(data_manager):
+    first = _sample_product_payload("prod-c", productName="Charlie Suite")
+    second = _sample_product_payload("prod-a", productName="Alpha Suite")
+    third = _sample_product_payload("prod-b", productName="Bravo Suite")
+
+    data_manager.save_product(first)
+    data_manager.save_product(second)
+    data_manager.save_product(third)
+
+    # Update timestamps to control order
+    with sqlite3.connect(data_manager.db_path) as conn:
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-01 10:00:00", "prod-a"),
+        )
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-02 10:00:00", "prod-b"),
+        )
+        conn.execute(
+            "UPDATE products SET updated_at = ? WHERE product_id = ?",
+            ("2024-01-03 10:00:00", "prod-c"),
+        )
+        conn.commit()
+
+    by_name = data_manager.search_products(
+        org_id="org-123",
+        sort="name",
+        limit=2,
+    )
+    assert [p["product_id"] for p in by_name] == ["prod-a", "prod-b"]
+
+    by_updated = data_manager.search_products(
+        org_id="org-123",
+        sort="updated_at",
+        limit=2,
+    )
+    assert [p["product_id"] for p in by_updated] == ["prod-c", "prod-b"]
```
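To exercise only the new regression tests, a pytest keyword filter along these lines should work from the project root (the path comes from the file list above; the exact invocation is an assumption about the project layout):

```python
import pytest

# Select only the search_products tests added in 1.2.3.
pytest.main([
    "fusesell_local/tests/test_data_manager_products.py",
    "-k", "search_products",
    "-v",
])
```

The limit-and-sort test expects `updated_at` ordering to be newest-first, which matches the `DESC` direction used by the implementation further down.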
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py

```diff
@@ -13,15 +13,40 @@ import logging
 from pathlib import Path
 
 
-class LocalDataManager:
-    """
-    Manages local data storage using SQLite database and JSON files.
-    Provides interface for storing execution results, customer data, and configurations.
-    """
-
-    # Class-level tracking to prevent multiple initializations
-    _initialized_databases = set()
-    _initialization_lock = False
+class LocalDataManager:
+    """
+    Manages local data storage using SQLite database and JSON files.
+    Provides interface for storing execution results, customer data, and configurations.
+    """
+
+    # Class-level tracking to prevent multiple initializations
+    _initialized_databases = set()
+    _initialization_lock = False
+    _product_json_fields = [
+        'target_users',
+        'key_features',
+        'unique_selling_points',
+        'pain_points_solved',
+        'competitive_advantages',
+        'pricing',
+        'pricing_rules',
+        'sales_metrics',
+        'customer_feedback',
+        'keywords',
+        'related_products',
+        'seasonal_demand',
+        'market_insights',
+        'case_studies',
+        'testimonials',
+        'success_metrics',
+        'product_variants',
+        'technical_specifications',
+        'compatibility',
+        'support_info',
+        'regulatory_compliance',
+        'localization',
+        'shipping_info'
+    ]
 
     def __init__(self, data_dir: str = "./fusesell_data"):
         """
```
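The columns named in `_product_json_fields` hold JSON text in SQLite and are decoded on read; a small self-contained round trip of that convention (the table schema is reduced to two of the listed columns for brevity):

```python
import json
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE products (product_id TEXT, keywords TEXT, pricing TEXT)")

# On save, list/dict fields are serialized to JSON text.
conn.execute(
    "INSERT INTO products VALUES (?, ?, ?)",
    ("prod-1", json.dumps(["CRM", "pipeline"]), json.dumps({"plan": "pro"})),
)

# On read, the stored text is decoded back into Python structures.
conn.row_factory = sqlite3.Row
row = conn.execute("SELECT * FROM products").fetchone()
product = dict(row)
for field in ("keywords", "pricing"):
    product[field] = json.loads(product[field]) if product[field] else None
print(product)
# {'product_id': 'prod-1', 'keywords': ['CRM', 'pipeline'], 'pricing': {'plan': 'pro'}}
```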
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py

```diff
@@ -1342,50 +1367,72 @@ class LocalDataManager:
             self.logger.error(f"Failed to save team settings: {str(e)}")
             raise
 
-    def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
-        """
-        Get team settings by team ID.
-
-        Args:
-            team_id: Team identifier
-
-        Returns:
-            Team settings dictionary or None if not found
-        """
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                conn.row_factory = sqlite3.Row
-                cursor = conn.cursor()
-                cursor.execute(
-                    "SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
-                row = cursor.fetchone()
-
-                if row:
-                    result = dict(row)
-                    # Parse JSON fields
-                    json_fields = [
-                        'gs_team_organization', 'gs_team_rep', 'gs_team_product',
-                        'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
-                        'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
-                    ]
-
-                    for field in json_fields:
-                        if result[field]:
-                            try:
-                                result[field] = json.loads(result[field])
-                            except json.JSONDecodeError:
-                                result[field] = None
-
-                    return result
-                return None
-
-        except Exception as e:
-            self.logger.error(f"Failed to get team settings: {str(e)}")
-            raise
-
-    def
-        """
-
+    def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
+        """
+        Get team settings by team ID.
+
+        Args:
+            team_id: Team identifier
+
+        Returns:
+            Team settings dictionary or None if not found
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute(
+                    "SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
+                row = cursor.fetchone()
+
+                if row:
+                    result = dict(row)
+                    # Parse JSON fields
+                    json_fields = [
+                        'gs_team_organization', 'gs_team_rep', 'gs_team_product',
+                        'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
+                        'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
+                    ]
+
+                    for field in json_fields:
+                        if result[field]:
+                            try:
+                                result[field] = json.loads(result[field])
+                            except json.JSONDecodeError:
+                                result[field] = None
+
+                    return result
+                return None
+
+        except Exception as e:
+            self.logger.error(f"Failed to get team settings: {str(e)}")
+            raise
+
+    def _deserialize_product_row(self, row: sqlite3.Row) -> Dict[str, Any]:
+        """
+        Convert a product row into a dictionary with JSON fields parsed.
+
+        Args:
+            row: SQLite row containing product data
+
+        Returns:
+            Dictionary representation of the row with JSON fields decoded
+        """
+        product = dict(row)
+
+        for field in self._product_json_fields:
+            value = product.get(field)
+            if value:
+                try:
+                    product[field] = json.loads(value)
+                except (json.JSONDecodeError, TypeError):
+                    product[field] = None
+
+        return product
+
+    def save_product(self, product_data: Dict[str, Any]) -> str:
+        """
+        Save or update product information.
 
         Args:
             product_data: Product information dictionary
```
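`_deserialize_product_row` centralizes the row-to-dict conversion that several getters previously repeated, and it degrades malformed JSON to `None` instead of raising. The standalone sketch below mirrors that behavior against an in-memory table rather than importing the class:

```python
import json
import sqlite3

JSON_FIELDS = ("keywords", "pricing")  # illustrative subset of _product_json_fields

def deserialize_product_row(row: sqlite3.Row) -> dict:
    """Mirror of the helper: dict(row) plus tolerant JSON decoding."""
    product = dict(row)
    for field in JSON_FIELDS:
        value = product.get(field)
        if value:
            try:
                product[field] = json.loads(value)
            except (json.JSONDecodeError, TypeError):
                product[field] = None  # malformed JSON degrades to None instead of raising
    return product

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("CREATE TABLE products (product_id TEXT, keywords TEXT, pricing TEXT)")
conn.execute("INSERT INTO products VALUES (?, ?, ?)", ("prod-1", '["CRM"]', "not-json"))
row = conn.execute("SELECT * FROM products").fetchone()
print(deserialize_product_row(row))
# {'product_id': 'prod-1', 'keywords': ['CRM'], 'pricing': None}
```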
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py

```diff
@@ -1569,50 +1616,119 @@ class LocalDataManager:
             self.logger.error(f"Failed to save product: {str(e)}")
             raise
 
-    def
+    def search_products(
+        self,
+        org_id: str,
+        status: Optional[str] = "active",
+        search_term: Optional[str] = None,
+        limit: Optional[int] = None,
+        sort: Optional[str] = "name"
+    ) -> List[Dict[str, Any]]:
+        """
+        Search products for an organization with optional filters.
+
+        Args:
+            org_id: Organization identifier
+            status: Product status filter ("active", "inactive", or "all")
+            search_term: Keyword to match against name, descriptions, or keywords
+            limit: Maximum number of products to return
+            sort: Sort order ("name", "created_at", "updated_at")
+
+        Returns:
+            List of product dictionaries
+        """
+        try:
+            def _is_placeholder(value: Any) -> bool:
+                return isinstance(value, str) and value.strip().startswith("{{") and value.strip().endswith("}}")
+
+            # Normalize status
+            normalized_status: Optional[str] = status
+            if _is_placeholder(normalized_status):
+                normalized_status = None
+            if isinstance(normalized_status, str):
+                normalized_status = normalized_status.strip().lower()
+            if normalized_status not in {'active', 'inactive', 'all'}:
+                normalized_status = 'active'
+
+            # Normalize sort
+            normalized_sort: Optional[str] = sort
+            if _is_placeholder(normalized_sort):
+                normalized_sort = None
+            if isinstance(normalized_sort, str):
+                normalized_sort = normalized_sort.strip().lower()
+            sort_map = {
+                'name': ("product_name COLLATE NOCASE", "ASC"),
+                'created_at': ("datetime(created_at)", "DESC"),
+                'updated_at': ("datetime(updated_at)", "DESC"),
+            }
+            order_by, direction = sort_map.get(normalized_sort, sort_map['name'])
+
+            # Normalize search term
+            normalized_search: Optional[str] = None
+            if not _is_placeholder(search_term) and search_term is not None:
+                normalized_search = str(search_term).strip()
+                if normalized_search == "":
+                    normalized_search = None
+
+            # Normalize limit
+            normalized_limit: Optional[int] = None
+            if not _is_placeholder(limit) and limit is not None:
+                try:
+                    normalized_limit = int(limit)
+                    if normalized_limit <= 0:
+                        normalized_limit = None
+                except (TypeError, ValueError):
+                    normalized_limit = None
+
+            where_clauses = ["org_id = ?"]
+            params: List[Any] = [org_id]
+
+            if normalized_status != 'all':
+                where_clauses.append("status = ?")
+                params.append(normalized_status)
+
+            query = "SELECT * FROM products WHERE " + " AND ".join(where_clauses)
+
+            if normalized_search:
+                like_value = f"%{normalized_search.lower()}%"
+                query += (
+                    " AND ("
+                    "LOWER(product_name) LIKE ? OR "
+                    "LOWER(COALESCE(short_description, '')) LIKE ? OR "
+                    "LOWER(COALESCE(long_description, '')) LIKE ? OR "
+                    "LOWER(COALESCE(keywords, '')) LIKE ?)"
+                )
+                params.extend([like_value] * 4)
+
+            query += f" ORDER BY {order_by} {direction}"
+
+            if normalized_limit is not None:
+                query += " LIMIT ?"
+                params.append(normalized_limit)
+
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute(query, params)
+                rows = cursor.fetchall()
+
+                return [self._deserialize_product_row(row) for row in rows]
+
+        except Exception as e:
+            self.logger.error(f"Failed to search products: {str(e)}")
+            raise
+
+    def get_products_by_org(self, org_id: str) -> List[Dict[str, Any]]:
+        """
+        Backward-compatible helper that returns active products for an organization.
+
+        Args:
+            org_id: Organization identifier
+
+        Returns:
+            List of active product dictionaries
+        """
+        return self.search_products(org_id=org_id, status="active")
 
     def get_products_by_team(self, team_id: str) -> List[Dict[str, Any]]:
         """
```
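For a concrete sense of what `search_products` sends to SQLite, this sketch reproduces the query assembly for one example call (keyword plus limit, default status and sort); it mirrors the code above rather than calling it:

```python
# Mirrors the WHERE/ORDER BY assembly in search_products for:
#   search_products(org_id="org-123", search_term="crm", limit=5)
where_clauses = ["org_id = ?", "status = ?"]  # status defaults to 'active'
params = ["org-123", "active"]

like_value = "%crm%"
query = "SELECT * FROM products WHERE " + " AND ".join(where_clauses)
query += (
    " AND ("
    "LOWER(product_name) LIKE ? OR "
    "LOWER(COALESCE(short_description, '')) LIKE ? OR "
    "LOWER(COALESCE(long_description, '')) LIKE ? OR "
    "LOWER(COALESCE(keywords, '')) LIKE ?)"
)
params.extend([like_value] * 4)

query += " ORDER BY product_name COLLATE NOCASE ASC"  # default sort = 'name'
query += " LIMIT ?"
params.append(5)

print(query)
print(params)
```

All user-supplied values travel through `?` placeholders; the only interpolated fragments are the ORDER BY column and direction, which come from the fixed `sort_map`.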
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py

```diff
@@ -1649,34 +1765,13 @@ class LocalDataManager:
                 cursor.execute(
                     f"SELECT * FROM products WHERE product_id IN ({placeholders}) AND status = 'active'", product_ids)
 
-
-
-
-
-
-
-
-                        'customer_feedback', 'keywords', 'related_products', 'seasonal_demand',
-                        'market_insights', 'case_studies', 'testimonials', 'success_metrics',
-                        'product_variants', 'technical_specifications', 'compatibility', 'support_info',
-                        'regulatory_compliance', 'localization', 'shipping_info'
-                    ]
-
-                    for field in json_fields:
-                        if product[field]:
-                            try:
-                                product[field] = json.loads(product[field])
-                            except json.JSONDecodeError:
-                                product[field] = None
-
-                    products.append(product)
-
-                return products
-
-        except Exception as e:
-            self.logger.error(f"Failed to get products by team: {str(e)}")
-            raise
-
+                return [self._deserialize_product_row(row)
+                        for row in cursor.fetchall()]
+
+        except Exception as e:
+            self.logger.error(f"Failed to get products by team: {str(e)}")
+            raise
+
     def get_product(self, product_id: str) -> Optional[Dict[str, Any]]:
         """
         Get product by ID.
```
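`get_products_by_team` now shares the deserializer as well. The `placeholders` string in its query is presumably the usual one-`?`-per-id expansion, roughly as sketched below (the method body that builds it is not part of this hunk, so this is an assumption):

```python
product_ids = ["prod-a", "prod-b", "prod-c"]

# One positional placeholder per id, e.g. "?,?,?"
placeholders = ",".join("?" * len(product_ids))
query = (
    f"SELECT * FROM products WHERE product_id IN ({placeholders}) "
    "AND status = 'active'"
)
print(query)  # SELECT * FROM products WHERE product_id IN (?,?,?) AND status = 'active'
```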
{fusesell-1.2.2 → fusesell-1.2.3}/fusesell_local/utils/data_manager.py

```diff
@@ -1692,33 +1787,15 @@ class LocalDataManager:
                 conn.row_factory = sqlite3.Row
                 cursor = conn.cursor()
                 cursor.execute("SELECT * FROM products WHERE product_id = ?", (product_id,))
-                row = cursor.fetchone()
-
-                if row:
-
-
-
-
-
-
-                        'market_insights', 'case_studies', 'testimonials', 'success_metrics',
-                        'product_variants', 'technical_specifications', 'compatibility', 'support_info',
-                        'regulatory_compliance', 'localization', 'shipping_info'
-                    ]
-
-                    for field in json_fields:
-                        if product[field]:
-                            try:
-                                product[field] = json.loads(product[field])
-                            except json.JSONDecodeError:
-                                product[field] = None
-
-                    return product
-                return None
-
-        except Exception as e:
-            self.logger.error(f"Error getting product {product_id}: {str(e)}")
-            raise
+                row = cursor.fetchone()
+
+                if row:
+                    return self._deserialize_product_row(row)
+                return None
+
+        except Exception as e:
+            self.logger.error(f"Error getting product {product_id}: {str(e)}")
+            raise
 
     def update_product(self, product_id: str, product_data: Dict[str, Any]) -> bool:
         """
```

All other files listed above are unchanged between 1.2.2 and 1.2.3.