fusesell-1.3.42-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fusesell-1.3.42.dist-info/METADATA +873 -0
- fusesell-1.3.42.dist-info/RECORD +35 -0
- fusesell-1.3.42.dist-info/WHEEL +5 -0
- fusesell-1.3.42.dist-info/entry_points.txt +2 -0
- fusesell-1.3.42.dist-info/licenses/LICENSE +21 -0
- fusesell-1.3.42.dist-info/top_level.txt +2 -0
- fusesell.py +20 -0
- fusesell_local/__init__.py +37 -0
- fusesell_local/api.py +343 -0
- fusesell_local/cli.py +1480 -0
- fusesell_local/config/__init__.py +11 -0
- fusesell_local/config/default_email_templates.json +34 -0
- fusesell_local/config/default_prompts.json +19 -0
- fusesell_local/config/default_scoring_criteria.json +154 -0
- fusesell_local/config/prompts.py +245 -0
- fusesell_local/config/settings.py +277 -0
- fusesell_local/pipeline.py +978 -0
- fusesell_local/stages/__init__.py +19 -0
- fusesell_local/stages/base_stage.py +603 -0
- fusesell_local/stages/data_acquisition.py +1820 -0
- fusesell_local/stages/data_preparation.py +1238 -0
- fusesell_local/stages/follow_up.py +1728 -0
- fusesell_local/stages/initial_outreach.py +2972 -0
- fusesell_local/stages/lead_scoring.py +1452 -0
- fusesell_local/utils/__init__.py +36 -0
- fusesell_local/utils/agent_context.py +552 -0
- fusesell_local/utils/auto_setup.py +361 -0
- fusesell_local/utils/birthday_email_manager.py +467 -0
- fusesell_local/utils/data_manager.py +4857 -0
- fusesell_local/utils/event_scheduler.py +959 -0
- fusesell_local/utils/llm_client.py +342 -0
- fusesell_local/utils/logger.py +203 -0
- fusesell_local/utils/output_helpers.py +2443 -0
- fusesell_local/utils/timezone_detector.py +914 -0
- fusesell_local/utils/validators.py +436 -0
fusesell_local/utils/data_manager.py
@@ -0,0 +1,4857 @@
+"""
+Local Data Manager for FuseSell Local Implementation
+Handles SQLite database operations and local file management
+"""
+
+import sqlite3
+import json
+import os
+import uuid
+from typing import Dict, Any, List, Optional, Sequence, Union
+from datetime import datetime
+import logging
+from pathlib import Path
+
+
+class LocalDataManager:
+    """
+    Manages local data storage using SQLite database and JSON files.
+    Provides interface for storing execution results, customer data, and configurations.
+    """
+
+    # Class-level tracking to prevent multiple initializations
+    _initialized_databases = set()
+    _initialization_lock = False
+    _product_json_fields = [
+        'target_users',
+        'key_features',
+        'unique_selling_points',
+        'pain_points_solved',
+        'competitive_advantages',
+        'pricing',
+        'pricing_rules',
+        'sales_metrics',
+        'customer_feedback',
+        'keywords',
+        'related_products',
+        'seasonal_demand',
+        'market_insights',
+        'case_studies',
+        'testimonials',
+        'success_metrics',
+        'product_variants',
+        'technical_specifications',
+        'compatibility',
+        'support_info',
+        'regulatory_compliance',
+        'localization',
+        'shipping_info'
+    ]
+
+    def __init__(self, data_dir: str = "./fusesell_data"):
+        """
+        Initialize data manager with specified data directory.
+
+        Args:
+            data_dir: Directory path for storing local data
+        """
+        self.data_dir = Path(data_dir)
+        self.db_path = self.data_dir / "fusesell.db"
+        self.config_dir = self.data_dir / "config"
+        self.drafts_dir = self.data_dir / "drafts"
+        self.logs_dir = self.data_dir / "logs"
+
+        self.logger = logging.getLogger("fusesell.data_manager")
+
+        # Create directories if they don't exist
+        self._create_directories()
+
+        # Initialize database with optimization check
+        self._init_database_optimized()
+
+    def _create_directories(self) -> None:
+        """Create necessary directories for data storage."""
+        for directory in [self.data_dir, self.config_dir, self.drafts_dir, self.logs_dir]:
+            directory.mkdir(parents=True, exist_ok=True)
+
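
A minimal usage sketch (reviewer annotation, not part of the package): constructing the manager creates the data directories and initializes the SQLite schema as a side effect of __init__. The import path assumes the package layout listed at the top of this diff.

    from fusesell_local.utils.data_manager import LocalDataManager

    # First construction creates ./fusesell_data/ with config/, drafts/, logs/
    # and initializes ./fusesell_data/fusesell.db
    dm = LocalDataManager(data_dir="./fusesell_data")
    print(dm.db_path)  # fusesell_data/fusesell.db
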
+    def _init_database_optimized(self) -> None:
+        """
+        Initialize database with optimization to avoid redundant initialization.
+        Only performs full initialization if database doesn't exist or is incomplete.
+        Uses a class-level lock to prevent concurrent initialization.
+        """
+        try:
+            db_path_str = str(self.db_path)
+
+            # Check if this database has already been initialized in this process
+            if db_path_str in LocalDataManager._initialized_databases:
+                self.logger.debug("Database already initialized in this process, skipping initialization")
+                return
+
+            # Use class-level lock to prevent concurrent initialization
+            if LocalDataManager._initialization_lock:
+                self.logger.debug("Database initialization in progress by another instance, skipping")
+                return
+
+            LocalDataManager._initialization_lock = True
+
+            try:
+                # Double-check after acquiring lock
+                if db_path_str in LocalDataManager._initialized_databases:
+                    self.logger.debug("Database already initialized by another instance, skipping initialization")
+                    return
+
+                # Check if database exists and has basic tables
+                if self.db_path.exists():
+                    with sqlite3.connect(self.db_path) as conn:
+                        cursor = conn.cursor()
+
+                        # Check if key tables exist (use tables that actually exist in our schema)
+                        cursor.execute("""
+                            SELECT name FROM sqlite_master
+                            WHERE type='table' AND name IN ('stage_results', 'customers', 'llm_worker_task')
+                        """)
+                        existing_tables = [row[0] for row in cursor.fetchall()]
+
+                        self.logger.debug(f"Database exists, found tables: {existing_tables}")
+
+                        if len(existing_tables) >= 3:
+                            self._migrate_email_drafts_table(cursor)
+                            self.logger.info("Database already initialized, skipping full initialization")
+                            LocalDataManager._initialized_databases.add(db_path_str)
+                            return
+
+                # Perform full initialization
+                self.logger.info("Performing database initialization")
+                self._init_database()
+                LocalDataManager._initialized_databases.add(db_path_str)
+
+            finally:
+                LocalDataManager._initialization_lock = False
+
+        except Exception as e:
+            LocalDataManager._initialization_lock = False
+            self.logger.warning(f"Database optimization check failed, performing full initialization: {str(e)}")
+            self._init_database()
+            LocalDataManager._initialized_databases.add(db_path_str)
+
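
One caveat worth flagging: _initialization_lock is a plain class-level boolean, so the check-and-set above is not atomic. It guards against re-entrant initialization within a single thread but is not a true mutex across threads. A thread-safe variant would typically use threading.Lock, as in this sketch (an alternative, not the package's code):

    import threading

    class _InitGuard:
        """Sketch of a thread-safe replacement for the boolean flag."""
        _lock = threading.Lock()   # real mutex instead of a True/False flag
        _initialized = set()       # db paths already initialized in this process

        @classmethod
        def ensure(cls, db_path: str, init_fn) -> None:
            with cls._lock:        # check-and-set now happens atomically
                if db_path not in cls._initialized:
                    init_fn()
                    cls._initialized.add(db_path)
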
+    def _init_database(self) -> None:
+        """Initialize SQLite database with required tables."""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+
+                # Create executions table
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS executions (
+                        execution_id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        org_name TEXT,
+                        customer_website TEXT,
+                        customer_name TEXT,
+                        status TEXT NOT NULL,
+                        started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        completed_at TIMESTAMP,
+                        config_json TEXT,
+                        results_json TEXT
+                    )
+                """)
+
+                # Create stage_results table
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS stage_results (
+                        id TEXT PRIMARY KEY,
+                        execution_id TEXT NOT NULL,
+                        stage_name TEXT NOT NULL,
+                        status TEXT NOT NULL,
+                        input_data TEXT,
+                        output_data TEXT,
+                        started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        completed_at TIMESTAMP,
+                        error_message TEXT,
+                        FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                    )
+                """)
+
+                # Create customers table
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS customers (
+                        customer_id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        company_name TEXT,
+                        website TEXT,
+                        industry TEXT,
+                        contact_name TEXT,
+                        contact_email TEXT,
+                        contact_phone TEXT,
+                        address TEXT,
+                        profile_data TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create lead_scores table
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS lead_scores (
+                        id TEXT PRIMARY KEY,
+                        execution_id TEXT NOT NULL,
+                        customer_id TEXT,
+                        product_id TEXT,
+                        score REAL,
+                        criteria_breakdown TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                    )
+                """)
+
+                # Create email_drafts table
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS email_drafts (
+                        draft_id TEXT PRIMARY KEY,
+                        execution_id TEXT NOT NULL,
+                        customer_id TEXT,
+                        subject TEXT,
+                        content TEXT,
+                        draft_type TEXT,
+                        version INTEGER DEFAULT 1,
+                        status TEXT DEFAULT 'draft',
+                        metadata TEXT,
+                        priority_order INTEGER DEFAULT 0,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                    )
+                """)
+
+                # Create llm_worker_task table (server-compatible)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS llm_worker_task (
+                        task_id TEXT PRIMARY KEY,
+                        plan_id TEXT NOT NULL,
+                        org_id TEXT NOT NULL,
+                        status TEXT NOT NULL DEFAULT 'running',
+                        current_runtime_index INTEGER DEFAULT 0,
+                        messages JSON,
+                        request_body JSON,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (plan_id) REFERENCES llm_worker_plan(id)
+                    )
+                """)
+
+                # Simply drop and recreate llm_worker_operation table to ensure correct schema
+                cursor.execute("DROP TABLE IF EXISTS llm_worker_operation")
+                self.logger.info(
+                    "Creating llm_worker_operation table with server-compatible schema - FIXED VERSION")
+
+                # Create llm_worker_operation table (server-compatible)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS llm_worker_operation (
+                        operation_id TEXT PRIMARY KEY,
+                        task_id TEXT NOT NULL,
+                        executor_name TEXT NOT NULL,
+                        runtime_index INTEGER NOT NULL DEFAULT 0,
+                        chain_index INTEGER NOT NULL DEFAULT 0,
+                        execution_status TEXT NOT NULL DEFAULT 'running',
+                        input_data JSON,
+                        output_data JSON,
+                        date_created TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        date_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (task_id) REFERENCES llm_worker_task(task_id)
+                    )
+                """)
+
+                # Create teams table (equivalent to llm_worker_plan_team)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS teams (
+                        team_id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        org_name TEXT,
+                        plan_id TEXT NOT NULL,
+                        plan_name TEXT,
+                        project_code TEXT,
+                        name TEXT NOT NULL,
+                        description TEXT,
+                        avatar TEXT,
+                        completed_settings INTEGER DEFAULT 0,
+                        total_settings INTEGER DEFAULT 0,
+                        completed_settings_list TEXT,
+                        missing_settings_list TEXT,
+                        status TEXT DEFAULT 'active',
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create team_settings table (equivalent to gs_team_settings)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS team_settings (
+                        id TEXT PRIMARY KEY,
+                        team_id TEXT NOT NULL,
+                        org_id TEXT NOT NULL,
+                        plan_id TEXT NOT NULL,
+                        plan_name TEXT,
+                        project_code TEXT,
+                        team_name TEXT,
+                        gs_team_organization TEXT,
+                        gs_team_rep TEXT,
+                        gs_team_product TEXT,
+                        gs_team_schedule_time TEXT,
+                        gs_team_initial_outreach TEXT,
+                        gs_team_follow_up TEXT,
+                        gs_team_auto_interaction TEXT,
+                        gs_team_followup_schedule_time TEXT,
+                        gs_team_birthday_email TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (team_id) REFERENCES teams(team_id)
+                    )
+                """)
+
+                # Check if we need to migrate old column names
+                try:
+                    cursor.execute("PRAGMA table_info(team_settings)")
+                    columns = [row[1] for row in cursor.fetchall()]
+
+                    # Check if we have old column names and need to migrate
+                    old_columns = ['organization_settings', 'sales_rep_settings', 'product_settings']
+
+                    if any(col in columns for col in old_columns):
+                        self.logger.info("Migrating team_settings table to new column names")
+
+                        # Create new table with correct column names
+                        cursor.execute("""
+                            CREATE TABLE IF NOT EXISTS team_settings_new (
+                                id TEXT PRIMARY KEY,
+                                team_id TEXT NOT NULL,
+                                org_id TEXT NOT NULL,
+                                plan_id TEXT NOT NULL,
+                                plan_name TEXT,
+                                project_code TEXT,
+                                team_name TEXT,
+                                gs_team_organization TEXT,
+                                gs_team_rep TEXT,
+                                gs_team_product TEXT,
+                                gs_team_schedule_time TEXT,
+                                gs_team_initial_outreach TEXT,
+                                gs_team_follow_up TEXT,
+                                gs_team_auto_interaction TEXT,
+                                gs_team_followup_schedule_time TEXT,
+                                gs_team_birthday_email TEXT,
+                                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                                FOREIGN KEY (team_id) REFERENCES teams(team_id)
+                            )
+                        """)
+
+                        # Copy data from old table to new table
+                        cursor.execute("""
+                            INSERT OR IGNORE INTO team_settings_new
+                            (id, team_id, org_id, plan_id, plan_name, project_code, team_name,
+                             gs_team_organization, gs_team_rep, gs_team_product, gs_team_schedule_time,
+                             gs_team_initial_outreach, gs_team_follow_up, gs_team_auto_interaction,
+                             gs_team_followup_schedule_time, gs_team_birthday_email, created_at, updated_at)
+                            SELECT
+                                id, team_id, org_id, plan_id, plan_name, project_code, team_name,
+                                organization_settings, sales_rep_settings, product_settings, schedule_time_settings,
+                                initial_outreach_settings, follow_up_settings, auto_interaction_settings,
+                                followup_schedule_settings, birthday_email_settings, created_at, updated_at
+                            FROM team_settings
+                        """)
+
+                        # Drop old table and rename new one
+                        cursor.execute("DROP TABLE team_settings")
+                        cursor.execute("ALTER TABLE team_settings_new RENAME TO team_settings")
+
+                        self.logger.info("Team settings table migration completed")
+                except Exception as e:
+                    self.logger.debug(f"Migration check/execution failed (may be normal): {str(e)}")
+
+                # Ensure teams table has status column for enabling/disabling teams
+                try:
+                    cursor.execute("PRAGMA table_info(teams)")
+                    team_columns = [row[1] for row in cursor.fetchall()]
+
+                    if "status" not in team_columns:
+                        self.logger.info("Adding status column to teams table")
+                        cursor.execute(
+                            "ALTER TABLE teams ADD COLUMN status TEXT DEFAULT 'active'"
+                        )
+                except Exception as e:
+                    self.logger.debug(
+                        f"Teams status column migration skipped/failed (may be normal): {str(e)}"
+                    )
+
+                # Create products table (equivalent to sell_products)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS products (
+                        product_id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        org_name TEXT,
+                        project_code TEXT,
+                        product_name TEXT NOT NULL,
+                        short_description TEXT,
+                        long_description TEXT,
+                        category TEXT,
+                        subcategory TEXT,
+                        target_users TEXT,
+                        key_features TEXT,
+                        unique_selling_points TEXT,
+                        pain_points_solved TEXT,
+                        competitive_advantages TEXT,
+                        pricing TEXT,
+                        pricing_rules TEXT,
+                        product_website TEXT,
+                        demo_available BOOLEAN DEFAULT FALSE,
+                        trial_available BOOLEAN DEFAULT FALSE,
+                        sales_contact_email TEXT,
+                        image_url TEXT,
+                        sales_metrics TEXT,
+                        customer_feedback TEXT,
+                        keywords TEXT,
+                        related_products TEXT,
+                        seasonal_demand TEXT,
+                        market_insights TEXT,
+                        case_studies TEXT,
+                        testimonials TEXT,
+                        success_metrics TEXT,
+                        product_variants TEXT,
+                        availability TEXT,
+                        technical_specifications TEXT,
+                        compatibility TEXT,
+                        support_info TEXT,
+                        regulatory_compliance TEXT,
+                        localization TEXT,
+                        installation_requirements TEXT,
+                        user_manual_url TEXT,
+                        return_policy TEXT,
+                        shipping_info TEXT,
+                        schema_version TEXT DEFAULT '1.3',
+                        status TEXT DEFAULT 'active',
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create gs_customer_llmtask table (server-compatible)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS gs_customer_llmtask (
+                        id TEXT PRIMARY KEY,
+                        task_id TEXT NOT NULL,
+                        customer_id TEXT NOT NULL,
+                        customer_name TEXT NOT NULL,
+                        customer_phone TEXT,
+                        customer_address TEXT,
+                        customer_email TEXT,
+                        customer_industry TEXT,
+                        customer_taxcode TEXT,
+                        customer_website TEXT,
+                        contact_name TEXT,
+                        org_id TEXT NOT NULL,
+                        org_name TEXT,
+                        project_code TEXT,
+                        crm_dob DATE,
+                        image_url TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        FOREIGN KEY (task_id) REFERENCES llm_worker_task(task_id),
+                        FOREIGN KEY (customer_id) REFERENCES customers(customer_id)
+                    )
+                """)
+
+                # Create prompts table (equivalent to gs_plan_team_prompt)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS prompts (
+                        id TEXT PRIMARY KEY,
+                        execution_id TEXT,
+                        org_id TEXT NOT NULL,
+                        plan_id TEXT,
+                        team_id TEXT,
+                        project_code TEXT,
+                        input_stage TEXT NOT NULL,
+                        prompt TEXT NOT NULL,
+                        fewshots BOOLEAN DEFAULT FALSE,
+                        instance_id TEXT,
+                        submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create scheduler_rules table (equivalent to gs_scheduler)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS scheduler_rules (
+                        id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        org_name TEXT,
+                        plan_id TEXT,
+                        plan_name TEXT,
+                        team_id TEXT,
+                        team_name TEXT,
+                        project_code TEXT,
+                        input_stage TEXT NOT NULL,
+                        input_stage_label TEXT,
+                        language TEXT,
+                        rule_config TEXT,
+                        is_autorun_time_rule BOOLEAN DEFAULT FALSE,
+                        status_code INTEGER,
+                        message TEXT,
+                        md_code TEXT,
+                        username TEXT,
+                        fullname TEXT,
+                        instance_id TEXT,
+                        submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create reminder_task table (equivalent to Directus reminder_task)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS reminder_task (
+                        id TEXT PRIMARY KEY,
+                        status TEXT NOT NULL,
+                        task TEXT NOT NULL,
+                        cron TEXT NOT NULL,
+                        room_id TEXT,
+                        tags TEXT,
+                        customextra TEXT,
+                        org_id TEXT,
+                        customer_id TEXT,
+                        task_id TEXT,
+                        import_uuid TEXT,
+                        scheduled_time TIMESTAMP,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        executed_at TIMESTAMP,
+                        error_message TEXT
+                    )
+                """)
+
+                # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS extracted_files (
+                        id TEXT PRIMARY KEY,
+                        org_id TEXT NOT NULL,
+                        plan_id TEXT,
+                        team_id TEXT,
+                        project_code TEXT,
+                        import_uuid TEXT,
+                        file_url TEXT,
+                        project_url TEXT,
+                        extracted_data TEXT,
+                        username TEXT,
+                        fullname TEXT,
+                        instance_id TEXT,
+                        submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                    )
+                """)
+
+                # Create llm_worker_plan table (server schema)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS llm_worker_plan (
+                        id TEXT PRIMARY KEY,
+                        name TEXT NOT NULL,
+                        description TEXT,
+                        org_id TEXT,
+                        status TEXT,
+                        executors TEXT,
+                        settings TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        date_created TIMESTAMP,
+                        date_updated TIMESTAMP,
+                        user_created TEXT,
+                        user_updated TEXT,
+                        sort INTEGER
+                    )
+                """)
+
+                # Create gs_company_criteria table (server schema)
+                cursor.execute("""
+                    CREATE TABLE IF NOT EXISTS gs_company_criteria (
+                        id TEXT PRIMARY KEY,
+                        name TEXT NOT NULL,
+                        definition TEXT,
+                        weight REAL,
+                        guidelines TEXT,
+                        scoring_factors TEXT,
+                        org_id TEXT,
+                        status TEXT,
+                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                        date_created TIMESTAMP,
+                        date_updated TIMESTAMP,
+                        user_created TEXT,
+                        user_updated TEXT,
+                        sort INTEGER
+                    )
+                """)
+
+                # Create indexes for better performance
+                # Check if executions is a table before creating index (it might be a view)
+                cursor.execute(
+                    "SELECT type FROM sqlite_master WHERE name='executions'")
+                executions_type = cursor.fetchone()
+                if executions_type and executions_type[0] == 'table':
+                    cursor.execute(
+                        "CREATE INDEX IF NOT EXISTS idx_executions_org_id ON executions(org_id)")
+
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_stage_results_execution_id ON stage_results(execution_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_customers_org_id ON customers(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_lead_scores_execution_id ON lead_scores(execution_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_email_drafts_execution_id ON email_drafts(execution_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_task_org_id ON llm_worker_task(org_id)")
+                # Server-compatible indexes for performance
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_task_org_id ON llm_worker_task(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_task_plan_id ON llm_worker_task(plan_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_task_status ON llm_worker_task(status)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_task_id ON llm_worker_operation(task_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_task_runtime ON llm_worker_operation(task_id, runtime_index)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_executor_status ON llm_worker_operation(executor_name, execution_status)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_created_date ON llm_worker_operation(date_created)")
+
+                # Existing indexes
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_teams_org_id ON teams(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_team_settings_team_id ON team_settings(team_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_products_org_id ON products(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_reminder_task_status ON reminder_task(status)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_reminder_task_org_id ON reminder_task(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_reminder_task_task_id ON reminder_task(task_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_reminder_task_cron ON reminder_task(cron)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
+                cursor.execute(
+                    "CREATE INDEX IF NOT EXISTS idx_gs_company_criteria_org_id ON gs_company_criteria(org_id)")
+
+                # Create compatibility views for backward compatibility
+                cursor.execute("""
+                    CREATE VIEW IF NOT EXISTS executions_view AS
+                    SELECT
+                        task_id as execution_id,
+                        org_id,
+                        '' as org_name,
+                        '' as customer_website,
+                        '' as customer_name,
+                        status,
+                        created_at as started_at,
+                        updated_at as completed_at,
+                        request_body as config_json,
+                        '{}' as results_json
+                    FROM llm_worker_task
+                """)
+
+                cursor.execute("""
+                    CREATE VIEW IF NOT EXISTS stage_results_view AS
+                    SELECT
+                        operation_id as id,
+                        task_id as execution_id,
+                        executor_name as stage_name,
+                        execution_status as status,
+                        input_data,
+                        output_data,
+                        date_created as started_at,
+                        date_updated as completed_at,
+                        CASE WHEN execution_status = 'failed'
+                             THEN json_extract(output_data, '$.error')
+                             ELSE NULL END as error_message
+                    FROM llm_worker_operation
+                """)
+
+                # Ensure email_drafts table has latest columns
+                self._migrate_email_drafts_table(cursor)
+
+                conn.commit()
+
+                # Initialize default data for new tables
+                self._initialize_default_data()
+
+                self.logger.info("Database initialized successfully")
+
+        except Exception as e:
+            self.logger.error(f"Failed to initialize database: {str(e)}")
+            raise
+
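
The two compatibility views above let legacy readers keep querying the old executions/stage_results column names while the data actually lives in the server-compatible llm_worker_task and llm_worker_operation tables. An illustrative query against the view (the org ID is invented):

    import sqlite3

    with sqlite3.connect("./fusesell_data/fusesell.db") as conn:
        conn.row_factory = sqlite3.Row
        # Legacy column names, served from llm_worker_task via the view
        rows = conn.execute(
            "SELECT execution_id, status, started_at FROM executions_view"
            " WHERE org_id = ?",
            ("org-123",),  # hypothetical org ID
        ).fetchall()
        for row in rows:
            print(row["execution_id"], row["status"])
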
+    def _migrate_email_drafts_table(self, cursor: sqlite3.Cursor) -> None:
+        """
+        Ensure email_drafts table has expected columns for metadata and priority.
+
+        Args:
+            cursor: Active database cursor
+        """
+        try:
+            cursor.execute("PRAGMA table_info(email_drafts)")
+            columns = {row[1] for row in cursor.fetchall()}
+
+            if "status" not in columns:
+                cursor.execute("ALTER TABLE email_drafts ADD COLUMN status TEXT DEFAULT 'draft'")
+            if "metadata" not in columns:
+                cursor.execute("ALTER TABLE email_drafts ADD COLUMN metadata TEXT")
+            if "priority_order" not in columns:
+                cursor.execute("ALTER TABLE email_drafts ADD COLUMN priority_order INTEGER DEFAULT 0")
+            if "updated_at" not in columns:
+                try:
+                    cursor.execute("ALTER TABLE email_drafts ADD COLUMN updated_at TIMESTAMP")
+                    cursor.execute(
+                        "UPDATE email_drafts SET updated_at = CURRENT_TIMESTAMP WHERE updated_at IS NULL"
+                    )
+                except Exception as exc:
+                    self.logger.debug(f"Updated_at column add skipped: {exc}")
+
+            try:
+                cursor.execute(
+                    """
+                    WITH ordered AS (
+                        SELECT draft_id,
+                               ROW_NUMBER() OVER (PARTITION BY execution_id ORDER BY created_at, draft_id) AS rn
+                        FROM email_drafts
+                        WHERE IFNULL(priority_order, 0) <= 0
+                    )
+                    UPDATE email_drafts
+                    SET priority_order = (
+                        SELECT rn FROM ordered WHERE ordered.draft_id = email_drafts.draft_id
+                    )
+                    WHERE draft_id IN (SELECT draft_id FROM ordered)
+                    """
+                )
+            except Exception as exc:
+                self.logger.debug(f"Priority backfill skipped: {exc}")
+
+            try:
+                cursor.connection.commit()
+            except Exception:
+                pass
+        except Exception as exc:
+            self.logger.warning(f"Email drafts table migration skipped/failed: {exc}")
+
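
The migration style here (inspect PRAGMA table_info, then ALTER TABLE ... ADD COLUMN only for missing columns) is the standard additive, idempotent pattern for SQLite, which historically could not drop or retype columns in place. The same idiom condensed into a reusable helper; a sketch only, this helper does not exist in the package:

    def ensure_column(cursor, table: str, column: str, decl: str) -> None:
        """Add `column` to `table` only if it is not already present."""
        existing = {row[1] for row in cursor.execute(f"PRAGMA table_info({table})")}
        if column not in existing:
            cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} {decl}")
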
+    def save_execution(
+        self,
+        execution_id: str,
+        org_id: str,
+        config: Dict[str, Any],
+        org_name: Optional[str] = None,
+        customer_website: Optional[str] = None,
+        customer_name: Optional[str] = None
+    ) -> None:
+        """
+        Save execution record to database.
+
+        Args:
+            execution_id: Unique execution identifier
+            org_id: Organization ID
+            config: Execution configuration
+            org_name: Organization name
+            customer_website: Customer website URL
+            customer_name: Customer company name
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+                cursor.execute("""
+                    INSERT INTO executions
+                    (execution_id, org_id, org_name, customer_website, customer_name, status, config_json)
+                    VALUES (?, ?, ?, ?, ?, ?, ?)
+                """, (
+                    execution_id, org_id, org_name, customer_website,
+                    customer_name, 'running', json.dumps(config)
+                ))
+                conn.commit()
+                self.logger.debug(f"Saved execution record: {execution_id}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to save execution: {str(e)}")
+            raise
+
+    def update_execution_status(
+        self,
+        execution_id: str,
+        status: str,
+        results: Optional[Dict[str, Any]] = None
+    ) -> None:
+        """
+        Update execution status and results.
+
+        Args:
+            execution_id: Execution identifier
+            status: New status (running, completed, failed)
+            results: Optional execution results
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+
+                if results:
+                    cursor.execute("""
+                        UPDATE executions
+                        SET status = ?, completed_at = CURRENT_TIMESTAMP, results_json = ?
+                        WHERE execution_id = ?
+                    """, (status, json.dumps(results), execution_id))
+                else:
+                    cursor.execute("""
+                        UPDATE executions
+                        SET status = ?, completed_at = CURRENT_TIMESTAMP
+                        WHERE execution_id = ?
+                    """, (status, execution_id))
+
+                conn.commit()
+                self.logger.debug(
+                    f"Updated execution status: {execution_id} -> {status}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to update execution status: {str(e)}")
+            raise
+
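
Together these two methods give each execution a simple lifecycle: inserted as 'running', later marked 'completed' or 'failed' with optional results. A sketch of that flow with invented IDs and config:

    import uuid
    from fusesell_local.utils.data_manager import LocalDataManager

    dm = LocalDataManager()
    exec_id = str(uuid.uuid4())
    dm.save_execution(
        execution_id=exec_id,
        org_id="org-123",  # hypothetical org ID
        config={"stages": ["data_acquisition", "lead_scoring"]},
    )
    # ... pipeline stages run here ...
    dm.update_execution_status(exec_id, "completed", results={"leads_scored": 42})
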
+    def save_stage_result(
+        self,
+        execution_id: str,
+        stage_name: str,
+        input_data: Dict[str, Any],
+        output_data: Dict[str, Any],
+        status: str,
+        error_message: Optional[str] = None
+    ) -> None:
+        """
+        Save stage execution result.
+
+        Args:
+            execution_id: Execution identifier
+            stage_name: Name of the stage
+            input_data: Stage input data
+            output_data: Stage output data
+            status: Stage execution status
+            error_message: Optional error message
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+                cursor.execute("""
+                    INSERT INTO stage_results
+                    (id, execution_id, stage_name, status, input_data, output_data, completed_at, error_message)
+                    VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, ?)
+                """, (
+                    f"uuid:{str(uuid.uuid4())}", execution_id, stage_name, status,
+                    json.dumps(input_data), json.dumps(
+                        output_data), error_message
+                ))
+                conn.commit()
+                self.logger.debug(
+                    f"Saved stage result: {execution_id}/{stage_name}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to save stage result: {str(e)}")
+            raise
+
+    def save_customer(self, customer_data: Dict[str, Any]) -> str:
+        """
+        Save or update customer information.
+
+        Args:
+            customer_data: Customer information dictionary
+
+        Returns:
+            Customer ID
+        """
+        try:
+            customer_id = customer_data.get(
+                'customer_id') or self._generate_customer_id()
+
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+
+                # Check if customer exists
+                cursor.execute(
+                    "SELECT customer_id FROM customers WHERE customer_id = ?", (customer_id,))
+                exists = cursor.fetchone()
+
+                if exists:
+                    # Update existing customer
+                    cursor.execute("""
+                        UPDATE customers
+                        SET company_name = ?, website = ?, industry = ?, contact_name = ?,
+                            contact_email = ?, contact_phone = ?, address = ?,
+                            profile_data = ?, updated_at = CURRENT_TIMESTAMP
+                        WHERE customer_id = ?
+                    """, (
+                        customer_data.get('company_name'),
+                        customer_data.get('website'),
+                        customer_data.get('industry'),
+                        customer_data.get('contact_name'),
+                        customer_data.get('contact_email'),
+                        customer_data.get('contact_phone'),
+                        customer_data.get('address'),
+                        json.dumps(customer_data),
+                        customer_id
+                    ))
+                else:
+                    # Insert new customer
+                    cursor.execute("""
+                        INSERT INTO customers
+                        (customer_id, org_id, company_name, website, industry, contact_name,
+                         contact_email, contact_phone, address, profile_data)
+                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """, (
+                        customer_id,
+                        customer_data.get('org_id'),
+                        customer_data.get('company_name'),
+                        customer_data.get('website'),
+                        customer_data.get('industry'),
+                        customer_data.get('contact_name'),
+                        customer_data.get('contact_email'),
+                        customer_data.get('contact_phone'),
+                        customer_data.get('address'),
+                        json.dumps(customer_data)
+                    ))
+
+                conn.commit()
+                self.logger.debug(f"Saved customer: {customer_id}")
+                return customer_id
+
+        except Exception as e:
+            self.logger.error(f"Failed to save customer: {str(e)}")
+            raise
+
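
save_customer is effectively an upsert keyed on customer_id: well-known fields get dedicated columns, and the whole input dict is also serialized into profile_data, so extra keys survive round-trips. A sketch with invented values (_generate_customer_id, referenced above, is defined later in the file, beyond this excerpt):

    # dm: LocalDataManager from the earlier sketch
    customer_id = dm.save_customer({
        "org_id": "org-123",               # hypothetical values throughout
        "company_name": "Acme Corp",
        "website": "https://acme.example",
        "industry": "manufacturing",
        "contact_email": "buyer@acme.example",
        "notes": "extra keys are preserved inside profile_data",
    })
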
+    def save_customer_task(self, customer_task_data: Dict[str, Any]) -> str:
+        """
+        Save customer task data to gs_customer_llmtask table (server-compatible).
+
+        Args:
+            customer_task_data: Customer task information dictionary
+
+        Returns:
+            Record ID
+        """
+        try:
+            record_id = f"{customer_task_data.get('task_id')}_{customer_task_data.get('customer_id')}"
+
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+
+                # Insert or replace customer task data
+                cursor.execute("""
+                    INSERT OR REPLACE INTO gs_customer_llmtask
+                    (id, task_id, customer_id, customer_name, customer_phone, customer_address,
+                     customer_email, customer_industry, customer_taxcode, customer_website,
+                     contact_name, org_id, org_name, project_code, crm_dob, image_url)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """, (
+                    record_id,
+                    customer_task_data.get('task_id'),
+                    customer_task_data.get('customer_id'),
+                    customer_task_data.get('customer_name'),
+                    customer_task_data.get('customer_phone'),
+                    customer_task_data.get('customer_address'),
+                    customer_task_data.get('customer_email'),
+                    customer_task_data.get('customer_industry'),
+                    customer_task_data.get('customer_taxcode'),
+                    customer_task_data.get('customer_website'),
+                    customer_task_data.get('contact_name'),
+                    customer_task_data.get('org_id'),
+                    customer_task_data.get('org_name'),
+                    customer_task_data.get('project_code'),
+                    customer_task_data.get('crm_dob'),
+                    customer_task_data.get('image_url')
+                ))
+
+                conn.commit()
+                self.logger.debug(f"Saved customer task: {record_id}")
+                return record_id
+
+        except Exception as e:
+            self.logger.error(f"Failed to save customer task data: {str(e)}")
+            raise
+
+    def save_lead_score(
+        self,
+        execution_id: str,
+        customer_id: str,
+        product_id: str,
+        score: float,
+        criteria_breakdown: Dict[str, Any]
+    ) -> None:
+        """
+        Save lead scoring result.
+
+        Args:
+            execution_id: Execution identifier
+            customer_id: Customer identifier
+            product_id: Product identifier
+            score: Lead score (0-100)
+            criteria_breakdown: Detailed scoring breakdown
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+                cursor.execute("""
+                    INSERT INTO lead_scores
+                    (id, execution_id, customer_id, product_id, score, criteria_breakdown)
+                    VALUES (?, ?, ?, ?, ?, ?)
+                """, (
+                    f"uuid:{str(uuid.uuid4())}", execution_id, customer_id, product_id, score,
+                    json.dumps(criteria_breakdown)
+                ))
+                conn.commit()
+                self.logger.debug(
+                    f"Saved lead score: {customer_id}/{product_id} = {score}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to save lead score: {str(e)}")
+            raise
+
+    def save_email_draft(
+        self,
+        draft_id: Union[str, Dict[str, Any]],
+        execution_id: Optional[str] = None,
+        customer_id: Optional[str] = None,
+        subject: Optional[str] = None,
+        content: Optional[str] = None,
+        draft_type: str = "initial_outreach",
+        version: int = 1,
+        status: str = "draft",
+        metadata: Optional[Union[Dict[str, Any], str]] = None,
+        priority_order: int = 0
+    ) -> None:
+        """
+        Save email draft. Accepts either explicit parameters or a draft data dictionary.
+
+        Args:
+            draft_id: Draft identifier or draft dictionary with keys
+            execution_id: Execution identifier
+            customer_id: Customer identifier
+            subject: Email subject
+            content: Email content
+            draft_type: Type of draft (initial_outreach, follow_up)
+            version: Draft version number
+            status: Draft status
+            metadata: Additional metadata (dict or JSON string)
+            priority_order: Numeric priority for scheduling/selection
+        """
+        try:
+            if isinstance(draft_id, dict):
+                data = draft_id
+                draft_id = data.get("draft_id")
+                execution_id = data.get("execution_id")
+                customer_id = data.get("customer_id")
+                subject = data.get("subject")
+                content = data.get("content")
+                draft_type = data.get("draft_type", draft_type)
+                version = data.get("version", version)
+                status = data.get("status", status)
+                metadata = data.get("metadata")
+                priority_order = data.get("priority_order", priority_order)
+
+            metadata_json = metadata
+            if isinstance(metadata, dict):
+                metadata_json = json.dumps(metadata)
+
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.cursor()
+                cursor.execute(
+                    """
+                    INSERT INTO email_drafts
+                    (draft_id, execution_id, customer_id, subject, content, draft_type, version, status, metadata, priority_order)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """,
+                    (
+                        draft_id,
+                        execution_id,
+                        customer_id,
+                        subject,
+                        content,
+                        draft_type,
+                        version,
+                        status,
+                        metadata_json,
+                        priority_order or 0,
+                    ),
+                )
+                conn.commit()
+                self.logger.debug(f"Saved email draft: {draft_id}")
+
+        except Exception as e:
+            self.logger.error(f"Failed to save email draft: {str(e)}")
+            raise
+
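
Because the first parameter doubles as either a draft ID or a whole draft dictionary, the two call styles below write equivalent rows (values invented; dict metadata is JSON-encoded before insert):

    # Keyword style
    dm.save_email_draft(
        draft_id="draft-001",            # hypothetical IDs
        execution_id="exec-001",
        subject="Intro to FuseSell",
        content="Hello ...",
        metadata={"tone": "friendly"},
    )

    # Dict style: one argument carrying the same fields
    dm.save_email_draft({
        "draft_id": "draft-002",
        "execution_id": "exec-001",
        "subject": "Following up",
        "content": "Just checking in ...",
        "draft_type": "follow_up",
    })
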
+    def get_execution(self, execution_id: str) -> Optional[Dict[str, Any]]:
+        """
+        Get execution record by ID.
+
+        Args:
+            execution_id: Execution identifier
+
+        Returns:
+            Execution record dictionary or None if not found
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute(
+                    "SELECT * FROM executions WHERE execution_id = ?", (execution_id,))
+                row = cursor.fetchone()
+
+                if row:
+                    result = dict(row)
+                    if result['config_json']:
+                        result['config'] = json.loads(result['config_json'])
+                    if result['results_json']:
+                        result['results'] = json.loads(result['results_json'])
+                    return result
+                return None
+
+        except Exception as e:
+            self.logger.error(f"Failed to get execution: {str(e)}")
+            raise
+
+    def get_stage_results(self, execution_id: str) -> List[Dict[str, Any]]:
+        """
+        Get all stage results for an execution.
+
+        Args:
+            execution_id: Execution identifier
+
+        Returns:
+            List of stage result dictionaries
+        """
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+                cursor = conn.cursor()
+                cursor.execute("""
+                    SELECT * FROM stage_results
+                    WHERE execution_id = ?
+                    ORDER BY started_at
+                """, (execution_id,))
+
+                results = []
+                for row in cursor.fetchall():
+                    result = dict(row)
+                    if result['input_data']:
+                        result['input_data'] = json.loads(result['input_data'])
+                    if result['output_data']:
+                        result['output_data'] = json.loads(
+                            result['output_data'])
+                    results.append(result)
+
+                return results
+
+        except Exception as e:
+            self.logger.error(f"Failed to get stage results: {str(e)}")
+            raise
+
+    def load_prompts(self) -> Dict[str, Any]:
+        """
+        Load prompt templates from configuration.
+        Priority: custom prompts (data_dir/config/prompts.json) > default prompts (package)
+
+        Returns:
+            Dictionary of prompt templates
+        """
+        try:
+            # Load default prompts from package
+            default_prompts = self._load_default_prompts()
+
+            # Load custom prompts from user's data directory
+            custom_prompts_file = self.config_dir / "prompts.json"
+            custom_prompts = {}
+            if custom_prompts_file.exists():
+                with open(custom_prompts_file, 'r', encoding='utf-8') as f:
+                    custom_prompts = json.load(f)
+                self.logger.debug(f"Loaded custom prompts from {custom_prompts_file}")
+
+            # Merge prompts - custom overrides default
+            merged_prompts = default_prompts.copy()
+            for stage_name, stage_prompts in custom_prompts.items():
+                if stage_name in merged_prompts:
+                    merged_prompts[stage_name].update(stage_prompts)
+                else:
+                    merged_prompts[stage_name] = stage_prompts
+
+            return merged_prompts
+        except Exception as e:
+            self.logger.error(f"Failed to load prompts: {str(e)}")
+            return {}
+
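
The merge in load_prompts is per stage, so a custom prompts.json only needs the stage/key pairs it overrides; everything else falls through to the packaged defaults. A sketch of seeding such an override (the stage and key names match those used later in this file; the prompt text is invented):

    import json
    from pathlib import Path

    config_dir = Path("./fusesell_data/config")
    config_dir.mkdir(parents=True, exist_ok=True)

    overrides = {
        "initial_outreach": {
            "email_generation": "Write a concise, friendly intro email ..."
        }
    }
    (config_dir / "prompts.json").write_text(
        json.dumps(overrides, indent=2), encoding="utf-8"
    )
    # dm.load_prompts() now returns the defaults with this one key replaced
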
1199
|
+
def _load_default_prompts(self) -> Dict[str, Any]:
|
|
1200
|
+
"""
|
|
1201
|
+
Load default system prompts from package.
|
|
1202
|
+
|
|
1203
|
+
Returns:
|
|
1204
|
+
Dictionary of default prompt templates
|
|
1205
|
+
"""
|
|
1206
|
+
return self._load_default_config('default_prompts.json')
|
|
1207
|
+
|
|
1208
|
+
def load_scoring_criteria(self) -> Dict[str, Any]:
|
|
1209
|
+
"""
|
|
1210
|
+
Load scoring criteria configuration.
|
|
1211
|
+
Priority: custom criteria (data_dir/config/scoring_criteria.json) > default criteria (package)
|
|
1212
|
+
|
|
1213
|
+
Returns:
|
|
1214
|
+
Dictionary of scoring criteria
|
|
1215
|
+
"""
|
|
1216
|
+
try:
|
|
1217
|
+
# Load default criteria from package
|
|
1218
|
+
default_criteria = self._load_default_config('default_scoring_criteria.json')
|
|
1219
|
+
|
|
1220
|
+
# Load custom criteria from user's data directory
|
|
1221
|
+
custom_criteria_file = self.config_dir / "scoring_criteria.json"
|
|
1222
|
+
if custom_criteria_file.exists():
|
|
1223
|
+
with open(custom_criteria_file, 'r', encoding='utf-8') as f:
|
|
1224
|
+
custom_criteria = json.load(f)
|
|
1225
|
+
# Merge custom with default (custom overrides)
|
|
1226
|
+
merged = default_criteria.copy()
|
|
1227
|
+
merged.update(custom_criteria)
|
|
1228
|
+
return merged
|
|
1229
|
+
|
|
1230
|
+
return default_criteria
|
|
1231
|
+
except Exception as e:
|
|
1232
|
+
self.logger.error(f"Failed to load scoring criteria: {str(e)}")
|
|
1233
|
+
return {}
|
|
1234
|
+
|
|
1235
|
+
def load_email_templates(self) -> Dict[str, Any]:
|
|
1236
|
+
"""
|
|
1237
|
+
Load email templates configuration.
|
|
1238
|
+
Priority: custom templates (data_dir/config/email_templates.json) > default templates (package)
|
|
1239
|
+
|
|
1240
|
+
Returns:
|
|
1241
|
+
Dictionary of email templates
|
|
1242
|
+
"""
|
|
1243
|
+
try:
|
|
1244
|
+
# Load default templates from package
|
|
1245
|
+
default_templates = self._load_default_config('default_email_templates.json')
|
|
1246
|
+
|
|
1247
|
+
# Load custom templates from user's data directory
|
|
1248
|
+
custom_templates_file = self.config_dir / "email_templates.json"
|
|
1249
|
+
if custom_templates_file.exists():
|
|
1250
|
+
with open(custom_templates_file, 'r', encoding='utf-8') as f:
|
|
1251
|
+
custom_templates = json.load(f)
|
|
1252
|
+
# Merge custom with default (custom overrides)
|
|
1253
|
+
merged = default_templates.copy()
|
|
1254
|
+
merged.update(custom_templates)
|
|
1255
|
+
return merged
|
|
1256
|
+
|
|
1257
|
+
return default_templates
|
|
1258
|
+
except Exception as e:
|
|
1259
|
+
self.logger.error(f"Failed to load email templates: {str(e)}")
|
|
1260
|
+
return {}
|
|
1261
|
+
|
|
1262
|
+
def _load_default_config(self, filename: str) -> Dict[str, Any]:
|
|
1263
|
+
"""
|
|
1264
|
+
Load default configuration file from package.
|
|
1265
|
+
|
|
1266
|
+
Args:
|
|
1267
|
+
filename: Name of the config file (e.g., 'default_prompts.json')
|
|
1268
|
+
|
|
1269
|
+
Returns:
|
|
1270
|
+
Dictionary of configuration data
|
|
1271
|
+
"""
|
|
1272
|
+
try:
|
|
1273
|
+
import importlib.resources as pkg_resources
|
|
1274
|
+
from pathlib import Path
|
|
1275
|
+
|
|
1276
|
+
# Try to load from package resources
|
|
1277
|
+
try:
|
|
1278
|
+
# Python 3.9+
|
|
1279
|
+
with pkg_resources.files('fusesell_local.config').joinpath(filename).open('r', encoding='utf-8') as f:
|
|
1280
|
+
return json.load(f)
|
|
1281
|
+
except AttributeError:
|
|
1282
|
+
# Python 3.8 fallback
|
|
1283
|
+
with pkg_resources.open_text('fusesell_local.config', filename, encoding='utf-8') as f:
|
|
1284
|
+
return json.load(f)
|
|
1285
|
+
except Exception as e:
|
|
1286
|
+
self.logger.warning(f"Failed to load default config {filename} from package: {str(e)}")
|
|
1287
|
+
# Fallback: try to load from installed location
|
|
1288
|
+
try:
|
|
1289
|
+
import fusesell_local
|
|
1290
|
+
package_dir = Path(fusesell_local.__file__).parent
|
|
1291
|
+
default_config_file = package_dir / "config" / filename
|
|
1292
|
+
if default_config_file.exists():
|
|
1293
|
+
with open(default_config_file, 'r', encoding='utf-8') as f:
|
|
1294
|
+
return json.load(f)
|
|
1295
|
+
except Exception as fallback_error:
|
|
1296
|
+
self.logger.warning(f"Fallback load also failed for {filename}: {str(fallback_error)}")
|
|
1297
|
+
|
|
1298
|
+
return {}
|
|
1299
|
+
|
|
1300
|
+
def generate_custom_prompt(
|
|
1301
|
+
self,
|
|
1302
|
+
stage_name: str,
|
|
1303
|
+
prompt_key: str,
|
|
1304
|
+
user_request: str,
|
|
1305
|
+
llm_client: Any = None,
|
|
1306
|
+
required_fields: Optional[List[str]] = None
|
|
1307
|
+
) -> str:
|
|
1308
|
+
"""
|
|
1309
|
+
Generate a custom prompt based on user's natural language request.
|
|
1310
|
+
|
|
1311
|
+
Args:
|
|
1312
|
+
stage_name: The stage name (e.g., 'initial_outreach', 'follow_up')
|
|
1313
|
+
prompt_key: The prompt key (e.g., 'email_generation')
|
|
1314
|
+
user_request: User's natural language customization request
|
|
1315
|
+
llm_client: LLM client instance for generating the prompt
|
|
1316
|
+
required_fields: List of required fields that must always exist in the prompt
|
|
1317
|
+
|
|
1318
|
+
Returns:
|
|
1319
|
+
Generated custom prompt string
|
|
1320
|
+
"""
|
|
1321
|
+
if not llm_client:
|
|
1322
|
+
raise ValueError("LLM client is required for custom prompt generation")
|
|
1323
|
+
|
|
1324
|
+
# Load default prompt as base
|
|
1325
|
+
default_prompts = self._load_default_prompts()
|
|
1326
|
+
default_prompt = default_prompts.get(stage_name, {}).get(prompt_key, "")
|
|
1327
|
+
|
|
1328
|
+
if not default_prompt:
|
|
1329
|
+
self.logger.warning(f"No default prompt found for {stage_name}.{prompt_key}")
|
|
1330
|
+
default_prompt = ""
|
|
1331
|
+
|
|
1332
|
+
# Build required fields context
|
|
1333
|
+
required_fields_str = ""
|
|
1334
|
+
if required_fields:
|
|
1335
|
+
required_fields_str = f"\n\nRequired fields that MUST be present in the prompt: {', '.join(required_fields)}"
|
|
1336
|
+
|
|
1337
|
+
# Generate custom prompt using LLM
|
|
1338
|
+
system_prompt = f"""You are an expert at creating email generation prompts for sales automation systems.
|
|
1339
|
+
Your task is to modify the default system prompt based on the user's customization request.
|
|
1340
|
+
|
|
1341
|
+
IMPORTANT RULES:
|
|
1342
|
+
1. Preserve all placeholder variables (##variable_name##) from the original prompt
|
|
1343
|
+
2. Maintain the JSON output structure requirements
|
|
1344
|
+
3. Keep essential instructions about email formatting and validation
|
|
1345
|
+
4. Incorporate the user's customization request naturally{required_fields_str}
|
|
1346
|
+
5. Return ONLY the modified prompt text, no explanations or markdown formatting
|
|
1347
|
+
6. The output should be a complete, standalone prompt that can be used directly"""
|
|
1348
|
+
|
|
1349
|
+
user_prompt = f"""Default System Prompt:
|
|
1350
|
+
{default_prompt}
|
|
1351
|
+
|
|
1352
|
+
User's Customization Request:
|
|
1353
|
+
{user_request}
|
|
1354
|
+
|
|
1355
|
+
Generate the modified prompt that incorporates the user's request while maintaining all critical elements."""
|
|
1356
|
+
|
|
1357
|
+
try:
|
|
1358
|
+
response = llm_client.chat_completion(
|
|
1359
|
+
messages=[
|
|
1360
|
+
{"role": "system", "content": system_prompt},
|
|
1361
|
+
{"role": "user", "content": user_prompt}
|
|
1362
|
+
],
|
|
1363
|
+
temperature=0.3,
|
|
1364
|
+
max_tokens=4000
|
|
1365
|
+
)
|
|
1366
|
+
|
|
1367
|
+
return response.strip()
|
|
1368
|
+
|
|
1369
|
+
except Exception as e:
|
|
1370
|
+
self.logger.error(f"Failed to generate custom prompt: {str(e)}")
|
|
1371
|
+
raise
|
|
1372
|
+
|
|
1373
|
+
    def save_custom_prompt(
        self,
        stage_name: str,
        prompt_key: str,
        custom_prompt: str
    ) -> None:
        """
        Save custom prompt to user's data directory.

        Args:
            stage_name: The stage name (e.g., 'initial_outreach', 'follow_up')
            prompt_key: The prompt key (e.g., 'email_generation')
            custom_prompt: The custom prompt to save
        """
        try:
            custom_prompts_file = self.config_dir / "prompts.json"

            # Load existing custom prompts
            existing_prompts = {}
            if custom_prompts_file.exists():
                with open(custom_prompts_file, 'r', encoding='utf-8') as f:
                    existing_prompts = json.load(f)

            # Update with new custom prompt
            if stage_name not in existing_prompts:
                existing_prompts[stage_name] = {}
            existing_prompts[stage_name][prompt_key] = custom_prompt

            # Save back to file
            with open(custom_prompts_file, 'w', encoding='utf-8') as f:
                json.dump(existing_prompts, f, indent=2, ensure_ascii=False)

            self.logger.info(f"Saved custom prompt for {stage_name}.{prompt_key}")

        except Exception as e:
            self.logger.error(f"Failed to save custom prompt: {str(e)}")
            raise

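    # Usage sketch for the two methods above (illustrative only, not part of
    # the packaged source; `dm` is assumed to be an initialized
    # LocalDataManager and `client` an LLM client exposing the
    # chat_completion(messages, temperature, max_tokens) interface used above):
    #
    #   custom = dm.generate_custom_prompt(
    #       stage_name='initial_outreach',
    #       prompt_key='email_generation',
    #       user_request='Always mention the 14-day free trial',
    #       llm_client=client,
    #       required_fields=['tone'],
    #   )
    #   dm.save_custom_prompt('initial_outreach', 'email_generation', custom)
    #
    # The saved prompt lands in <config_dir>/prompts.json keyed by stage name
    # and prompt key, so later loads can prefer it over the packaged default.
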
    def process_initial_outreach_customization(
        self,
        initial_outreach_config: Dict[str, Any],
        llm_client: Any = None
    ) -> Dict[str, Any]:
        """
        Process initial_outreach configuration and generate custom prompts if requested.

        Args:
            initial_outreach_config: Initial outreach configuration with optional customization_request
            llm_client: LLM client for generating custom prompts

        Returns:
            Processed initial_outreach configuration
        """
        if not initial_outreach_config:
            return {}

        # Required fields that should always exist
        required_fields = ['tone']

        # Extract customization request if present
        customization_request = initial_outreach_config.get('customization_request')

        if customization_request and llm_client:
            try:
                # Generate custom prompt for initial_outreach stage
                custom_prompt = self.generate_custom_prompt(
                    stage_name='initial_outreach',
                    prompt_key='email_generation',
                    user_request=customization_request,
                    llm_client=llm_client,
                    required_fields=required_fields
                )

                # Save the custom prompt
                self.save_custom_prompt(
                    stage_name='initial_outreach',
                    prompt_key='email_generation',
                    custom_prompt=custom_prompt
                )

                self.logger.info("Generated and saved custom prompt for initial_outreach")

            except Exception as e:
                self.logger.error(f"Failed to process customization request: {str(e)}")
                # Don't fail the entire save operation, just log the error

        # Build the processed configuration (keep all fields except customization_request)
        processed_config = {}
        for key, value in initial_outreach_config.items():
            if key != 'customization_request':
                processed_config[key] = value

        # Ensure tone field exists
        if 'tone' not in processed_config:
            processed_config['tone'] = 'Professional'

        return processed_config

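    # Behavior sketch (illustrative; the values are hypothetical): a config of
    #   {'tone': 'Friendly', 'customization_request': 'Mention our webinar'}
    # comes back as {'tone': 'Friendly'} after the custom prompt has been
    # generated and saved, while {} stays {}, and a config without an explicit
    # tone gains 'tone': 'Professional'.
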
    def _generate_customer_id(self) -> str:
        """Generate unique customer ID."""
        import uuid
        return f"uuid:{str(uuid.uuid4())}"

    def _normalize_status_value(self, status: Optional[Union[str, bool]]) -> Optional[str]:
        """
        Normalize activation status values.

        Args:
            status: Status value provided by the caller

        Returns:
            Normalized status ("active"/"inactive") or None if not provided
        """
        if status is None:
            return None

        if isinstance(status, bool):
            return 'active' if status else 'inactive'

        normalized = str(status).strip().lower()
        if not normalized:
            return None

        if normalized not in {'active', 'inactive'}:
            raise ValueError("Status must be 'active' or 'inactive'")

        return normalized

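    # Normalization examples (illustrative): True -> 'active', False ->
    # 'inactive', ' ACTIVE ' -> 'active', '' -> None, None -> None, and any
    # other string (e.g. 'archived') raises ValueError.
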
    # ===== TEAM MANAGEMENT METHODS =====

    def save_team(
        self,
        team_id: str,
        org_id: str,
        org_name: str,
        plan_id: str,
        name: str,
        description: str = None,
        plan_name: str = None,
        project_code: str = None,
        avatar: str = None,
        status: Optional[Union[str, bool]] = None
    ) -> str:
        """
        Save or update team information.

        Args:
            team_id: Team identifier
            org_id: Organization identifier
            org_name: Organization name
            plan_id: Plan identifier
            name: Team name
            description: Team description
            plan_name: Plan name
            project_code: Project code
            avatar: Avatar URL
            status: Optional activation status ("active" or "inactive")

        Returns:
            Team ID
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute("SELECT status FROM teams WHERE team_id = ?", (team_id,))
                existing_row = cursor.fetchone()
                normalized_status = self._normalize_status_value(status)
                status_value = normalized_status or (
                    existing_row[0] if existing_row and len(existing_row) > 0 else None
                ) or 'active'

                if existing_row:
                    # Update existing team
                    cursor.execute("""
                        UPDATE teams SET
                            org_name = ?, plan_name = ?, project_code = ?, name = ?, description = ?,
                            avatar = ?, status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE team_id = ?
                    """, (org_name, plan_name, project_code, name, description, avatar, status_value, team_id))
                else:
                    # Insert new team
                    cursor.execute("""
                        INSERT INTO teams
                        (team_id, org_id, org_name, plan_id, plan_name, project_code, name, description, avatar, status)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (team_id, org_id, org_name, plan_id, plan_name, project_code, name, description, avatar, status_value))

                conn.commit()
                self.logger.debug(f"Saved team: {team_id}")
                return team_id

        except Exception as e:
            self.logger.error(f"Error saving team {team_id}: {str(e)}")
            raise

    def get_team(self, team_id: str) -> Optional[Dict[str, Any]]:
        """
        Get team by ID.

        Args:
            team_id: Team identifier

        Returns:
            Team data or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT * FROM teams WHERE team_id = ?", (team_id,))
                row = cursor.fetchone()

                if row:
                    columns = [description[0] for description in cursor.description]
                    return dict(zip(columns, row))
                return None

        except Exception as e:
            self.logger.error(f"Error getting team {team_id}: {str(e)}")
            raise

    def list_teams(self, org_id: str, status: Optional[str] = "active") -> List[Dict[str, Any]]:
        """
        List all teams for an organization.

        Args:
            org_id: Organization identifier
            status: Status filter ("active", "inactive", or "all"); defaults to "active"

        Returns:
            List of team data
        """
        try:
            # Normalize status
            normalized_status: Optional[str] = status
            if isinstance(normalized_status, str):
                normalized_status = normalized_status.strip().lower()
            if normalized_status not in {'active', 'inactive', 'all'}:
                normalized_status = 'active'

            where_clauses = ["org_id = ?"]
            params: List[Any] = [org_id]

            if normalized_status != 'all':
                where_clauses.append("status = ?")
                params.append(normalized_status)

            query = "SELECT * FROM teams WHERE " + " AND ".join(where_clauses)
            query += " ORDER BY created_at DESC"

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(query, params)
                rows = cursor.fetchall()

                columns = [description[0] for description in cursor.description]
                return [dict(zip(columns, row)) for row in rows]

        except Exception as e:
            self.logger.error(f"Error listing teams for org {org_id}: {str(e)}")
            raise

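    # Usage sketch (illustrative; `dm` is assumed to be an initialized
    # LocalDataManager):
    #
    #   dm.save_team('team-1', 'org-1', 'Acme', 'plan-1', 'Sales East')
    #   active = dm.list_teams('org-1')                  # defaults to 'active'
    #   everything = dm.list_teams('org-1', status='all')
    #
    # Note the asymmetry: list_teams() silently falls back to 'active' for an
    # unrecognized status filter, whereas _normalize_status_value() raises.
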
    def update_team(
        self,
        team_id: str,
        name: str = None,
        description: str = None,
        plan_name: str = None,
        project_code: str = None,
        avatar: str = None,
        status: Optional[Union[str, bool]] = None
    ) -> bool:
        """
        Update team information.

        Args:
            team_id: Team identifier
            name: New team name
            description: New team description
            plan_name: New plan name
            project_code: New project code
            avatar: New avatar URL
            status: New activation status ("active" or "inactive")

        Returns:
            True if updated successfully
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Build update query dynamically
                updates = []
                params = []

                if name is not None:
                    updates.append("name = ?")
                    params.append(name)
                if description is not None:
                    updates.append("description = ?")
                    params.append(description)
                if plan_name is not None:
                    updates.append("plan_name = ?")
                    params.append(plan_name)
                if project_code is not None:
                    updates.append("project_code = ?")
                    params.append(project_code)
                if avatar is not None:
                    updates.append("avatar = ?")
                    params.append(avatar)
                if status is not None:
                    normalized_status = self._normalize_status_value(status)
                    if normalized_status is not None:
                        updates.append("status = ?")
                        params.append(normalized_status)

                if not updates:
                    return True  # Nothing to update

                updates.append("updated_at = CURRENT_TIMESTAMP")
                params.append(team_id)

                query = f"UPDATE teams SET {', '.join(updates)} WHERE team_id = ?"
                cursor.execute(query, params)

                conn.commit()
                self.logger.debug(f"Updated team: {team_id}")
                return cursor.rowcount > 0

        except Exception as e:
            self.logger.error(f"Error updating team {team_id}: {str(e)}")
            raise

    def update_team_status(
        self,
        team_id: str,
        status: Union[str, bool]
    ) -> bool:
        """
        Update the activation status for a team.

        Args:
            team_id: Team identifier
            status: Target status ("active" or "inactive")

        Returns:
            True if a record was updated
        """
        normalized_status = self._normalize_status_value(status)
        if normalized_status is None:
            raise ValueError("Status is required when updating team status")

        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    """
                    UPDATE teams
                    SET status = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE team_id = ?
                    """,
                    (normalized_status, team_id)
                )
                conn.commit()
                if cursor.rowcount:
                    self.logger.debug(f"Updated team status: {team_id} -> {normalized_status}")
                return cursor.rowcount > 0

        except Exception as e:
            self.logger.error(f"Error updating team status {team_id}: {str(e)}")
            raise

    # ===== TEAM SETTINGS MANAGEMENT METHODS =====

    def save_team_settings(
        self,
        team_id: str,
        org_id: str,
        plan_id: str,
        team_name: str,
        gs_team_organization: Optional[Dict[str, Any]] = None,
        gs_team_rep: Optional[List[Dict[str, Any]]] = None,
        gs_team_product: Optional[List[Dict[str, Any]]] = None,
        gs_team_schedule_time: Optional[Dict[str, Any]] = None,
        gs_team_initial_outreach: Optional[Dict[str, Any]] = None,
        gs_team_follow_up: Optional[Dict[str, Any]] = None,
        gs_team_auto_interaction: Optional[Dict[str, Any]] = None,
        gs_team_followup_schedule_time: Optional[Dict[str, Any]] = None,
        gs_team_birthday_email: Optional[Dict[str, Any]] = None,
        llm_client: Any = None
    ) -> None:
        """
        Save or update team settings.

        Args:
            team_id: Team identifier
            org_id: Organization identifier
            plan_id: Plan identifier
            team_name: Team name
            gs_team_organization: Organization configuration
            gs_team_rep: Sales representative settings
            gs_team_product: Product configuration
            gs_team_schedule_time: Scheduling configuration
            gs_team_initial_outreach: Initial outreach configuration (can include customization_request)
            gs_team_follow_up: Follow-up configuration
            gs_team_auto_interaction: Auto interaction rules
            gs_team_followup_schedule_time: Follow-up scheduling rules
            gs_team_birthday_email: Birthday email configuration
            llm_client: Optional LLM client for custom prompt generation
        """
        try:
            settings_id = f"{team_id}_{org_id}"

            # Process initial_outreach customization if present
            processed_initial_outreach = gs_team_initial_outreach
            if gs_team_initial_outreach and isinstance(gs_team_initial_outreach, dict):
                if gs_team_initial_outreach.get('customization_request'):
                    try:
                        processed_initial_outreach = self.process_initial_outreach_customization(
                            gs_team_initial_outreach,
                            llm_client=llm_client
                        )
                        self.logger.info("Processed initial_outreach customization request")
                    except Exception as e:
                        self.logger.warning(f"Failed to process initial_outreach customization: {str(e)}")
                        # Continue with original config if processing fails
                        processed_initial_outreach = gs_team_initial_outreach

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Check if settings exist
                cursor.execute(
                    "SELECT id FROM team_settings WHERE team_id = ?", (team_id,))
                exists = cursor.fetchone()

                if exists:
                    # Update existing settings
                    cursor.execute("""
                        UPDATE team_settings
                        SET org_id = ?, plan_id = ?, team_name = ?,
                            gs_team_organization = ?, gs_team_rep = ?, gs_team_product = ?,
                            gs_team_schedule_time = ?, gs_team_initial_outreach = ?, gs_team_follow_up = ?,
                            gs_team_auto_interaction = ?, gs_team_followup_schedule_time = ?, gs_team_birthday_email = ?,
                            updated_at = CURRENT_TIMESTAMP
                        WHERE team_id = ?
                    """, (
                        org_id, plan_id, team_name,
                        json.dumps(gs_team_organization) if gs_team_organization else None,
                        json.dumps(gs_team_rep) if gs_team_rep else None,
                        json.dumps(gs_team_product) if gs_team_product else None,
                        json.dumps(gs_team_schedule_time) if gs_team_schedule_time else None,
                        json.dumps(processed_initial_outreach) if processed_initial_outreach else None,
                        json.dumps(gs_team_follow_up) if gs_team_follow_up else None,
                        json.dumps(gs_team_auto_interaction) if gs_team_auto_interaction else None,
                        json.dumps(gs_team_followup_schedule_time) if gs_team_followup_schedule_time else None,
                        json.dumps(gs_team_birthday_email) if gs_team_birthday_email else None,
                        team_id
                    ))
                else:
                    # Insert new settings
                    cursor.execute("""
                        INSERT INTO team_settings
                        (id, team_id, org_id, plan_id, team_name, gs_team_organization, gs_team_rep,
                         gs_team_product, gs_team_schedule_time, gs_team_initial_outreach, gs_team_follow_up,
                         gs_team_auto_interaction, gs_team_followup_schedule_time, gs_team_birthday_email)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        settings_id, team_id, org_id, plan_id, team_name,
                        json.dumps(gs_team_organization) if gs_team_organization else None,
                        json.dumps(gs_team_rep) if gs_team_rep else None,
                        json.dumps(gs_team_product) if gs_team_product else None,
                        json.dumps(gs_team_schedule_time) if gs_team_schedule_time else None,
                        json.dumps(processed_initial_outreach) if processed_initial_outreach else None,
                        json.dumps(gs_team_follow_up) if gs_team_follow_up else None,
                        json.dumps(gs_team_auto_interaction) if gs_team_auto_interaction else None,
                        json.dumps(gs_team_followup_schedule_time) if gs_team_followup_schedule_time else None,
                        json.dumps(gs_team_birthday_email) if gs_team_birthday_email else None
                    ))

                conn.commit()
                self.logger.debug(f"Saved team settings: {team_id}")

        except Exception as e:
            self.logger.error(f"Failed to save team settings: {str(e)}")
            raise

    def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
        """
        Get team settings by team ID.

        Args:
            team_id: Team identifier

        Returns:
            Team settings dictionary or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
                row = cursor.fetchone()

                if row:
                    result = dict(row)
                    # Parse JSON fields
                    json_fields = [
                        'gs_team_organization', 'gs_team_rep', 'gs_team_product',
                        'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
                        'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
                    ]

                    for field in json_fields:
                        if result[field]:
                            try:
                                result[field] = json.loads(result[field])
                            except json.JSONDecodeError:
                                result[field] = None

                    return result
                return None

        except Exception as e:
            self.logger.error(f"Failed to get team settings: {str(e)}")
            raise

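    # Round-trip sketch (illustrative; `dm` is assumed to be an initialized
    # LocalDataManager): dicts and lists passed to save_team_settings() are
    # stored as JSON text and come back as Python objects:
    #
    #   dm.save_team_settings('team-1', 'org-1', 'plan-1', 'Sales East',
    #                         gs_team_rep=[{'name': 'Ada', 'is_primary': True}])
    #   settings = dm.get_team_settings('team-1')
    #   settings['gs_team_rep'][0]['name']   # -> 'Ada'
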
    def build_team_settings_snapshot(
        self,
        team_id: str,
        sections: Optional[Sequence[str]] = None
    ) -> Dict[str, Any]:
        """
        Build a response payload containing team settings in the expected RealTimeX format.

        Args:
            team_id: Team identifier
            sections: Optional sequence of section names to include. Accepts either
                full keys (e.g. ``gs_team_product``) or shorthand without the prefix.

        Returns:
            Dictionary shaped as ``{"data": [{...}]}``. When no settings exist,
            returns ``{"data": []}``.
        """
        settings = self.get_team_settings(team_id)
        if not settings:
            return {"data": []}

        available_fields = [
            'gs_team_organization',
            'gs_team_rep',
            'gs_team_product',
            'gs_team_schedule_time',
            'gs_team_initial_outreach',
            'gs_team_follow_up',
            'gs_team_auto_interaction',
            'gs_team_followup_schedule_time',
            'gs_team_birthday_email',
        ]

        if sections:
            normalized = set()
            for item in sections:
                if not item:
                    continue
                item = item.strip()
                if not item:
                    continue
                if item.startswith("gs_team_"):
                    normalized.add(item)
                else:
                    normalized.add(f"gs_team_{item}")
            fields_to_include = [field for field in available_fields if field in normalized]
        else:
            fields_to_include = available_fields

        list_like_fields = {
            'gs_team_organization',
            'gs_team_rep',
            'gs_team_product',
            'gs_team_auto_interaction',
        }
        list_field_defaults = {
            'gs_team_organization': {
                'org_name': None,
                'address': None,
                'website': None,
                'industry': None,
                'description': None,
                'logo': None,
                'primary_email': None,
                'primary_phone': None,
                'primary_color': None,
                'is_active': False,
                'avg_rating': None,
                'total_sales': None,
                'total_products': None,
                'date_joined': None,
                'last_active': None,
                'social_media_links': [],
            },
            'gs_team_rep': {
                'name': None,
                'email': None,
                'phone': None,
                'position': None,
                'website': None,
                'logo': None,
                'username': None,
                'is_primary': False,
                'primary_color': None,
                'primary_phone': None,
            },
            'gs_team_product': {
                'product_id': None,
                'product_name': None,
                'image_url': None,
                'enabled': True,
                'priority': None,
            },
            'gs_team_auto_interaction': {
                'from_email': '',
                'from_name': '',
                'from_number': '',
                'tool_type': 'Email',
                'email_cc': '',
                'email_bcc': '',
            },
        }
        alias_fields = {
            'gs_team_organization': {
                'name': 'org_name',
                'brand_palette': 'primary_color',
            },
        }

        snapshot: Dict[str, Any] = {}
        for field in fields_to_include:
            value = settings.get(field)
            if value is None:
                continue

            if field in list_like_fields:
                if isinstance(value, list):
                    normalized_items = []
                    defaults = list_field_defaults.get(field, {})
                    aliases = alias_fields.get(field, {})
                    for item in value:
                        if not isinstance(item, dict):
                            continue
                        normalized = {}
                        for key, default_val in defaults.items():
                            if key == 'social_media_links':
                                current = item.get(key)
                                normalized[key] = current if isinstance(current, list) else []
                            else:
                                normalized[key] = item.get(key, default_val)
                        for legacy_key, target_key in aliases.items():
                            if normalized.get(target_key) in (None, '', []):
                                if legacy_key in item:
                                    normalized[target_key] = item[legacy_key]
                        # include any additional keys that might exist
                        normalized_items.append(normalized)
                    snapshot[field] = normalized_items
                elif value:
                    defaults = list_field_defaults.get(field, {})
                    aliases = alias_fields.get(field, {})
                    normalized = {key: value.get(key, default_val) for key, default_val in defaults.items()}
                    for legacy_key, target_key in aliases.items():
                        if normalized.get(target_key) in (None, '', []):
                            if legacy_key in value:
                                normalized[target_key] = value[legacy_key]
                    snapshot[field] = [normalized]
                else:
                    snapshot[field] = []
            else:
                snapshot[field] = value

        if not snapshot:
            return {"data": []}

        return {"data": [snapshot]}

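    # Shape sketch (illustrative): build_team_settings_snapshot('team-1',
    # sections=['rep']) expands the shorthand to 'gs_team_rep' and returns
    # something like
    #   {"data": [{"gs_team_rep": [{"name": "Ada", "email": None, ...}]}]}
    # with every default key present; a team with no stored settings yields
    # {"data": []}.
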
    def _deserialize_product_row(self, row: sqlite3.Row) -> Dict[str, Any]:
        """
        Convert a product row into a dictionary with JSON fields parsed.

        Args:
            row: SQLite row containing product data

        Returns:
            Dictionary representation of the row with JSON fields decoded
        """
        product = dict(row)

        for field in self._product_json_fields:
            value = product.get(field)
            if value:
                try:
                    product[field] = json.loads(value)
                except (json.JSONDecodeError, TypeError):
                    product[field] = None

        return product

    def save_product(self, product_data: Dict[str, Any]) -> str:
        """
        Save or update product information.

        Args:
            product_data: Product information dictionary

        Returns:
            Product ID
        """
        try:
            product_id = product_data.get('product_id') or product_data.get('id') or self._generate_product_id()

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute(
                    "SELECT status FROM products WHERE product_id = ?", (product_id,))
                existing_row = cursor.fetchone()
                existing_status = existing_row[0] if existing_row else None
                normalized_status = self._normalize_status_value(product_data.get('status'))
                status_value = normalized_status or existing_status or 'active'

                if existing_row:
                    # Update existing product
                    cursor.execute("""
                        UPDATE products
                        SET org_id = ?, org_name = ?, project_code = ?, product_name = ?,
                            short_description = ?, long_description = ?, category = ?, subcategory = ?,
                            target_users = ?, key_features = ?, unique_selling_points = ?, pain_points_solved = ?,
                            competitive_advantages = ?, pricing = ?, pricing_rules = ?, product_website = ?,
                            demo_available = ?, trial_available = ?, sales_contact_email = ?, image_url = ?,
                            sales_metrics = ?, customer_feedback = ?, keywords = ?, related_products = ?,
                            seasonal_demand = ?, market_insights = ?, case_studies = ?, testimonials = ?,
                            success_metrics = ?, product_variants = ?, availability = ?, technical_specifications = ?,
                            compatibility = ?, support_info = ?, regulatory_compliance = ?, localization = ?,
                            installation_requirements = ?, user_manual_url = ?, return_policy = ?, shipping_info = ?,
                            status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE product_id = ?
                    """, (
                        product_data.get('org_id'), product_data.get('org_name'), product_data.get('project_code'),
                        product_data.get('productName'), product_data.get('shortDescription'), product_data.get('longDescription'),
                        product_data.get('category'), product_data.get('subcategory'),
                        json.dumps(product_data.get('targetUsers')) if product_data.get('targetUsers') else None,
                        json.dumps(product_data.get('keyFeatures')) if product_data.get('keyFeatures') else None,
                        json.dumps(product_data.get('uniqueSellingPoints')) if product_data.get('uniqueSellingPoints') else None,
                        json.dumps(product_data.get('painPointsSolved')) if product_data.get('painPointsSolved') else None,
                        json.dumps(product_data.get('competitiveAdvantages')) if product_data.get('competitiveAdvantages') else None,
                        json.dumps(product_data.get('pricing')) if product_data.get('pricing') else None,
                        json.dumps(product_data.get('pricingRules')) if product_data.get('pricingRules') else None,
                        product_data.get('productWebsite'), product_data.get('demoAvailable', False),
                        product_data.get('trialAvailable', False), product_data.get('salesContactEmail'),
                        product_data.get('imageUrl'),
                        json.dumps(product_data.get('salesMetrics')) if product_data.get('salesMetrics') else None,
                        json.dumps(product_data.get('customerFeedback')) if product_data.get('customerFeedback') else None,
                        json.dumps(product_data.get('keywords')) if product_data.get('keywords') else None,
                        json.dumps(product_data.get('relatedProducts')) if product_data.get('relatedProducts') else None,
                        json.dumps(product_data.get('seasonalDemand')) if product_data.get('seasonalDemand') else None,
                        json.dumps(product_data.get('marketInsights')) if product_data.get('marketInsights') else None,
                        json.dumps(product_data.get('caseStudies')) if product_data.get('caseStudies') else None,
                        json.dumps(product_data.get('testimonials')) if product_data.get('testimonials') else None,
                        json.dumps(product_data.get('successMetrics')) if product_data.get('successMetrics') else None,
                        json.dumps(product_data.get('productVariants')) if product_data.get('productVariants') else None,
                        product_data.get('availability'),
                        json.dumps(product_data.get('technicalSpecifications')) if product_data.get('technicalSpecifications') else None,
                        json.dumps(product_data.get('compatibility')) if product_data.get('compatibility') else None,
                        json.dumps(product_data.get('supportInfo')) if product_data.get('supportInfo') else None,
                        json.dumps(product_data.get('regulatoryCompliance')) if product_data.get('regulatoryCompliance') else None,
                        json.dumps(product_data.get('localization')) if product_data.get('localization') else None,
                        product_data.get('installationRequirements'), product_data.get('userManualUrl'),
                        product_data.get('returnPolicy'),
                        json.dumps(product_data.get('shippingInfo')) if product_data.get('shippingInfo') else None,
                        status_value,
                        product_id
                    ))
                else:
                    # Insert new product
                    cursor.execute("""
                        INSERT INTO products
                        (product_id, org_id, org_name, project_code, product_name, short_description, long_description,
                         category, subcategory, target_users, key_features, unique_selling_points, pain_points_solved,
                         competitive_advantages, pricing, pricing_rules, product_website, demo_available, trial_available,
                         sales_contact_email, image_url, sales_metrics, customer_feedback, keywords, related_products,
                         seasonal_demand, market_insights, case_studies, testimonials, success_metrics, product_variants,
                         availability, technical_specifications, compatibility, support_info, regulatory_compliance,
                         localization, installation_requirements, user_manual_url, return_policy, shipping_info, status)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        product_id, product_data.get('org_id'), product_data.get('org_name'), product_data.get('project_code'),
                        product_data.get('productName'), product_data.get('shortDescription'), product_data.get('longDescription'),
                        product_data.get('category'), product_data.get('subcategory'),
                        json.dumps(product_data.get('targetUsers')) if product_data.get('targetUsers') else None,
                        json.dumps(product_data.get('keyFeatures')) if product_data.get('keyFeatures') else None,
                        json.dumps(product_data.get('uniqueSellingPoints')) if product_data.get('uniqueSellingPoints') else None,
                        json.dumps(product_data.get('painPointsSolved')) if product_data.get('painPointsSolved') else None,
                        json.dumps(product_data.get('competitiveAdvantages')) if product_data.get('competitiveAdvantages') else None,
                        json.dumps(product_data.get('pricing')) if product_data.get('pricing') else None,
                        json.dumps(product_data.get('pricingRules')) if product_data.get('pricingRules') else None,
                        product_data.get('productWebsite'), product_data.get('demoAvailable', False),
                        product_data.get('trialAvailable', False), product_data.get('salesContactEmail'),
                        product_data.get('imageUrl'),
                        json.dumps(product_data.get('salesMetrics')) if product_data.get('salesMetrics') else None,
                        json.dumps(product_data.get('customerFeedback')) if product_data.get('customerFeedback') else None,
                        json.dumps(product_data.get('keywords')) if product_data.get('keywords') else None,
                        json.dumps(product_data.get('relatedProducts')) if product_data.get('relatedProducts') else None,
                        json.dumps(product_data.get('seasonalDemand')) if product_data.get('seasonalDemand') else None,
                        json.dumps(product_data.get('marketInsights')) if product_data.get('marketInsights') else None,
                        json.dumps(product_data.get('caseStudies')) if product_data.get('caseStudies') else None,
                        json.dumps(product_data.get('testimonials')) if product_data.get('testimonials') else None,
                        json.dumps(product_data.get('successMetrics')) if product_data.get('successMetrics') else None,
                        json.dumps(product_data.get('productVariants')) if product_data.get('productVariants') else None,
                        product_data.get('availability'),
                        json.dumps(product_data.get('technicalSpecifications')) if product_data.get('technicalSpecifications') else None,
                        json.dumps(product_data.get('compatibility')) if product_data.get('compatibility') else None,
                        json.dumps(product_data.get('supportInfo')) if product_data.get('supportInfo') else None,
                        json.dumps(product_data.get('regulatoryCompliance')) if product_data.get('regulatoryCompliance') else None,
                        json.dumps(product_data.get('localization')) if product_data.get('localization') else None,
                        product_data.get('installationRequirements'), product_data.get('userManualUrl'),
                        product_data.get('returnPolicy'),
                        json.dumps(product_data.get('shippingInfo')) if product_data.get('shippingInfo') else None,
                        status_value
                    ))

                conn.commit()
                self.logger.debug(f"Saved product: {product_id}")
                return product_id

        except Exception as e:
            self.logger.error(f"Failed to save product: {str(e)}")
            raise

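    # Payload sketch (illustrative; field values are hypothetical):
    # save_product() expects camelCase keys and serializes list/dict values to
    # the JSON columns itself:
    #
    #   pid = dm.save_product({
    #       'org_id': 'org-1',
    #       'productName': 'FuseSell',
    #       'keyFeatures': ['lead scoring', 'outreach'],
    #       'pricing': {'model': 'subscription'},
    #   })
    #
    # Omitting 'product_id'/'id' makes it an insert with a generated
    # "uuid:..." identifier; passing an existing ID turns it into an update.
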
    def search_products(
        self,
        org_id: str,
        status: Optional[str] = "active",
        search_term: Optional[str] = None,
        limit: Optional[int] = None,
        sort: Optional[str] = "name"
    ) -> List[Dict[str, Any]]:
        """
        Search products for an organization with optional filters.

        Args:
            org_id: Organization identifier
            status: Product status filter ("active", "inactive", or "all")
            search_term: Keyword to match against name, descriptions, or keywords
            limit: Maximum number of products to return
            sort: Sort order ("name", "created_at", "updated_at")

        Returns:
            List of product dictionaries
        """
        try:
            def _is_placeholder(value: Any) -> bool:
                return isinstance(value, str) and value.strip().startswith("{{") and value.strip().endswith("}}")

            # Normalize status
            normalized_status: Optional[str] = status
            if _is_placeholder(normalized_status):
                normalized_status = None
            if isinstance(normalized_status, str):
                normalized_status = normalized_status.strip().lower()
            if normalized_status not in {'active', 'inactive', 'all'}:
                normalized_status = 'active'

            # Normalize sort
            normalized_sort: Optional[str] = sort
            if _is_placeholder(normalized_sort):
                normalized_sort = None
            if isinstance(normalized_sort, str):
                normalized_sort = normalized_sort.strip().lower()
            sort_map = {
                'name': ("product_name COLLATE NOCASE", "ASC"),
                'created_at': ("datetime(created_at)", "DESC"),
                'updated_at': ("datetime(updated_at)", "DESC"),
            }
            order_by, direction = sort_map.get(normalized_sort, sort_map['name'])

            # Normalize search term
            normalized_search: Optional[str] = None
            if not _is_placeholder(search_term) and search_term is not None:
                normalized_search = str(search_term).strip()
                if normalized_search == "":
                    normalized_search = None

            # Normalize limit
            normalized_limit: Optional[int] = None
            if not _is_placeholder(limit) and limit is not None:
                try:
                    normalized_limit = int(limit)
                    if normalized_limit <= 0:
                        normalized_limit = None
                except (TypeError, ValueError):
                    normalized_limit = None

            where_clauses = ["org_id = ?"]
            params: List[Any] = [org_id]

            if normalized_status != 'all':
                where_clauses.append("status = ?")
                params.append(normalized_status)

            query = "SELECT * FROM products WHERE " + " AND ".join(where_clauses)

            if normalized_search:
                like_value = f"%{normalized_search.lower()}%"
                query += (
                    " AND ("
                    "LOWER(product_name) LIKE ? OR "
                    "LOWER(COALESCE(short_description, '')) LIKE ? OR "
                    "LOWER(COALESCE(long_description, '')) LIKE ? OR "
                    "LOWER(COALESCE(keywords, '')) LIKE ?)"
                )
                params.extend([like_value] * 4)

            query += f" ORDER BY {order_by} {direction}"

            if normalized_limit is not None:
                query += " LIMIT ?"
                params.append(normalized_limit)

            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()
                cursor.execute(query, params)
                rows = cursor.fetchall()

                return [self._deserialize_product_row(row) for row in rows]

        except Exception as e:
            self.logger.error(f"Failed to search products: {str(e)}")
            raise

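    # Usage sketch (illustrative):
    #
    #   hits = dm.search_products('org-1', search_term='scoring',
    #                             limit=10, sort='updated_at')
    #
    # Unresolved template placeholders such as "{{status}}" are treated as
    # unset, so callers wiring this method to templated inputs fall back to
    # the defaults ('active' status, name ordering, no limit).
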
    def get_products_by_org(self, org_id: str) -> List[Dict[str, Any]]:
        """
        Backward-compatible helper that returns active products for an organization.

        Args:
            org_id: Organization identifier

        Returns:
            List of active product dictionaries
        """
        return self.search_products(org_id=org_id, status="active")

    def get_products_by_team(self, team_id: str) -> List[Dict[str, Any]]:
        """
        Get products configured for a specific team.

        Args:
            team_id: Team identifier

        Returns:
            List of product dictionaries
        """
        try:
            # Get team settings first
            team_settings = self.get_team_settings(team_id)
            if not team_settings or not team_settings.get('gs_team_product'):
                return []

            # Extract product IDs from team settings
            product_settings = team_settings['gs_team_product']
            if not isinstance(product_settings, list):
                return []

            product_ids = [p.get('product_id') for p in product_settings if p.get('product_id')]
            if not product_ids:
                return []

            # Get products by IDs
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                placeholders = ','.join(['?' for _ in product_ids])
                cursor.execute(
                    f"SELECT * FROM products WHERE product_id IN ({placeholders}) AND status = 'active'", product_ids)

                return [self._deserialize_product_row(row) for row in cursor.fetchall()]

        except Exception as e:
            self.logger.error(f"Failed to get products by team: {str(e)}")
            raise

    def get_product(self, product_id: str) -> Optional[Dict[str, Any]]:
        """
        Get product by ID.

        Args:
            product_id: Product identifier

        Returns:
            Product data or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()
                cursor.execute("SELECT * FROM products WHERE product_id = ?", (product_id,))
                row = cursor.fetchone()

                if row:
                    return self._deserialize_product_row(row)
                return None

        except Exception as e:
            self.logger.error(f"Error getting product {product_id}: {str(e)}")
            raise

    def update_product(self, product_id: str, product_data: Dict[str, Any]) -> bool:
        """
        Update product information.

        Args:
            product_id: Product identifier
            product_data: Updated product data

        Returns:
            True if updated successfully
        """
        try:
            # Get existing product data first
            existing_product = self.get_product(product_id)
            if not existing_product:
                self.logger.error(f"Product not found: {product_id}")
                return False

            # Convert existing data to save_product format (snake_case to camelCase)
            converted_existing = {
                'product_id': existing_product.get('product_id'),
                'org_id': existing_product.get('org_id'),
                'org_name': existing_product.get('org_name'),
                'project_code': existing_product.get('project_code'),
                'productName': existing_product.get('product_name'),
                'shortDescription': existing_product.get('short_description'),
                'longDescription': existing_product.get('long_description'),
                'category': existing_product.get('category'),
                'subcategory': existing_product.get('subcategory'),
                'targetUsers': existing_product.get('target_users'),
                'keyFeatures': existing_product.get('key_features'),
                'uniqueSellingPoints': existing_product.get('unique_selling_points'),
                'painPointsSolved': existing_product.get('pain_points_solved'),
                'competitiveAdvantages': existing_product.get('competitive_advantages'),
                'pricing': existing_product.get('pricing'),
                'pricingRules': existing_product.get('pricing_rules'),
                'productWebsite': existing_product.get('product_website'),
                'demoAvailable': existing_product.get('demo_available'),
                'trialAvailable': existing_product.get('trial_available'),
                'salesContactEmail': existing_product.get('sales_contact_email'),
                'imageUrl': existing_product.get('image_url'),
                'salesMetrics': existing_product.get('sales_metrics'),
                'customerFeedback': existing_product.get('customer_feedback'),
                'keywords': existing_product.get('keywords'),
                'relatedProducts': existing_product.get('related_products'),
                'seasonalDemand': existing_product.get('seasonal_demand'),
                'marketInsights': existing_product.get('market_insights'),
                'caseStudies': existing_product.get('case_studies'),
                'testimonials': existing_product.get('testimonials'),
                'successMetrics': existing_product.get('success_metrics'),
                'productVariants': existing_product.get('product_variants'),
                'availability': existing_product.get('availability'),
                'technicalSpecifications': existing_product.get('technical_specifications'),
                'compatibility': existing_product.get('compatibility'),
                'supportInfo': existing_product.get('support_info'),
                'regulatoryCompliance': existing_product.get('regulatory_compliance'),
                'localization': existing_product.get('localization'),
                'installationRequirements': existing_product.get('installation_requirements'),
                'userManualUrl': existing_product.get('user_manual_url'),
                'returnPolicy': existing_product.get('return_policy'),
                'shippingInfo': existing_product.get('shipping_info'),
                'status': existing_product.get('status')
            }

            # Merge existing data with updates
            merged_data = converted_existing.copy()
            merged_data.update(product_data)
            merged_data['product_id'] = product_id

            # Use save_product with merged data
            updated_id = self.save_product(merged_data)
            return updated_id == product_id

        except Exception as e:
            self.logger.error(f"Error updating product {product_id}: {str(e)}")
            raise

    def update_product_status(self, product_id: str, status: Union[str, bool]) -> bool:
        """
        Update activation status for a product.

        Args:
            product_id: Product identifier
            status: Target status ("active" or "inactive")

        Returns:
            True if a product record was updated
        """
        normalized_status = self._normalize_status_value(status)
        if normalized_status is None:
            raise ValueError("Status is required when updating product status")

        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    """
                    UPDATE products
                    SET status = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE product_id = ?
                    """,
                    (normalized_status, product_id)
                )
                conn.commit()
                if cursor.rowcount:
                    self.logger.debug(f"Updated product status: {product_id} -> {normalized_status}")
                return cursor.rowcount > 0

        except Exception as e:
            self.logger.error(f"Failed to update product status {product_id}: {str(e)}")
            raise

    def save_scoring_criteria(self, org_id: str, criteria: List[Dict[str, Any]]) -> None:
        """
        Save scoring criteria for an organization.

        Args:
            org_id: Organization identifier
            criteria: List of scoring criteria
        """
        try:
            # Save to configuration file
            criteria_file = self.config_dir / "scoring_criteria.json"

            # Load existing criteria
            existing_criteria = {}
            if criteria_file.exists():
                with open(criteria_file, 'r') as f:
                    existing_criteria = json.load(f)

            # Update criteria for this org
            existing_criteria[org_id] = criteria

            # Save back to file
            with open(criteria_file, 'w') as f:
                json.dump(existing_criteria, f, indent=2)

            self.logger.debug(f"Saved scoring criteria for org: {org_id}")

        except Exception as e:
            self.logger.error(f"Failed to save scoring criteria: {str(e)}")
            raise

    def get_scoring_criteria(self, org_id: str) -> List[Dict[str, Any]]:
        """
        Get scoring criteria for an organization.

        Args:
            org_id: Organization identifier

        Returns:
            List of scoring criteria
        """
        try:
            criteria_file = self.config_dir / "scoring_criteria.json"

            if criteria_file.exists():
                with open(criteria_file, 'r') as f:
                    all_criteria = json.load(f)
                    return all_criteria.get(org_id, [])

            return []

        except Exception as e:
            self.logger.error(f"Failed to get scoring criteria: {str(e)}")
            return []

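    # Round-trip sketch (illustrative; the criterion dict is hypothetical):
    # criteria live per-org in <config_dir>/scoring_criteria.json rather than
    # in SQLite:
    #
    #   dm.save_scoring_criteria('org-1', [{'name': 'industry_fit', 'weight': 0.15}])
    #   dm.get_scoring_criteria('org-1')    # -> [{'name': 'industry_fit', ...}]
    #   dm.get_scoring_criteria('other')    # -> []
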
    def _generate_product_id(self) -> str:
        """Generate unique product ID."""
        import uuid
        return f"uuid:{str(uuid.uuid4())}"

    def _initialize_default_data(self):
        """Initialize default data for llm_worker_plan and gs_company_criteria tables."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Check if llm_worker_plan has data
                cursor.execute("SELECT COUNT(*) FROM llm_worker_plan")
                plan_count = cursor.fetchone()[0]

                if plan_count == 0:
                    # Insert default llm_worker_plan record
                    default_plan = {
                        'id': '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',
                        'name': 'FuseSell AI (v1.025)',
                        'description': 'Default FuseSell AI plan for local development',
                        'org_id': 'rta',
                        'status': 'published',
                        'executors': json.dumps([
                            {
                                'llm_worker_executor_id': {
                                    'name': 'gs_161_data_acquisition',
                                    'display_name': 'Data Acquisition'
                                }
                            },
                            {
                                'llm_worker_executor_id': {
                                    'name': 'gs_161_data_preparation',
                                    'display_name': 'Data Preparation'
                                }
                            },
                            {
                                'llm_worker_executor_id': {
                                    'name': 'gs_161_lead_scoring',
                                    'display_name': 'Lead Scoring'
                                }
                            },
                            {
                                'llm_worker_executor_id': {
                                    'name': 'gs_162_initial_outreach',
                                    'display_name': 'Initial Outreach'
                                }
                            },
                            {
                                'llm_worker_executor_id': {
                                    'name': 'gs_162_follow_up',
                                    'display_name': 'Follow Up'
                                }
                            }
                        ]),
                        'settings': json.dumps({}),
                        'date_created': datetime.now().isoformat(),
                        'user_created': 'system'
                    }

                    cursor.execute("""
                        INSERT INTO llm_worker_plan
                        (id, name, description, org_id, status, executors, settings,
                         date_created, user_created)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        default_plan['id'],
                        default_plan['name'],
                        default_plan['description'],
                        default_plan['org_id'],
                        default_plan['status'],
                        default_plan['executors'],
                        default_plan['settings'],
                        default_plan['date_created'],
                        default_plan['user_created']
                    ))

                    self.logger.debug(
                        "Initialized default llm_worker_plan data")

                # Check if gs_company_criteria has data
                cursor.execute("SELECT COUNT(*) FROM gs_company_criteria")
                criteria_count = cursor.fetchone()[0]

                if criteria_count == 0:
                    # Insert default gs_company_criteria records (based on fetched data)
                    default_criteria = [
                        {
                            'id': 'criteria_industry_fit',
                            'name': 'industry_fit',
                            'definition': 'How well the customer\'s industry aligns with the product\'s target market',
                            'weight': 0.15,
                            'guidelines': json.dumps({
                                'low': {'range': [0, 49], 'description': "Industries with minimal overlap or relevance to product capabilities"},
                                'medium': {'range': [50, 79], 'description': 'Industries with potential for product adoption but limited case studies'},
                                'high': {'range': [80, 100], 'description': 'Industries where product has proven success (e.g., IT services, software development, project management firms)'}
                            }),
                            'scoring_factors': json.dumps([
                                'Perfect industry match: 80-100',
                                'Related industry: 60-79',
                                'Adjacent industry: 40-59',
                                'Unrelated industry: 0-39'
                            ]),
                            'org_id': 'rta',
                            'status': 'published',
                            'date_created': datetime.now().isoformat(),
                            'user_created': 'system'
                        },
                        {
                            'id': 'criteria_company_size',
                            'name': 'company_size',
                            'definition': 'Company size alignment with product\'s ideal customer profile',
                            'weight': 0.15,
                            'guidelines': json.dumps({
                                'low': {'range': [0, 49], 'description': 'Companies below 20 or above 1000 employees, or outside the specified revenue ranges'},
                                'medium': {'range': [50, 79], 'description': 'Companies with 20-49 or 501-1000 employees, $1M-$4.9M or $50.1M-$100M revenue'},
                                'high': {'range': [80, 100], 'description': 'Companies with 50-500 employees and $5M-$50M annual revenue'}
                            }),
                            'scoring_factors': json.dumps([
                                'Ideal size range: 80-100',
                                'Close to ideal: 60-79',
                                'Acceptable size: 40-59',
                                'Poor size fit: 0-39'
                            ]),
                            'org_id': 'rta',
                            'status': 'published',
                            'date_created': datetime.now().isoformat(),
                            'user_created': 'system'
                        },
                        {
                            'id': 'criteria_pain_points',
                            'name': 'pain_points',
                            'definition': 'How well the product addresses customer\'s identified pain points',
                            'weight': 0.3,
                            'guidelines': json.dumps({
                                'low': {'range': [0, 49], 'description': "Few or no relevant pain points, or challenges outside product's primary focus"},
                                'medium': {'range': [50, 79], 'description': 'Some relevant pain points addressed, with potential for significant impact'},
                                'high': {'range': [80, 100], 'description': "Multiple critical pain points directly addressed by product's core features"}
                            }),
                            'scoring_factors': json.dumps([
                                'Addresses all major pain points: 80-100',
                                'Addresses most pain points: 60-79',
                                'Addresses some pain points: 40-59',
                                'Addresses few/no pain points: 0-39'
                            ]),
                            'org_id': 'rta',
                            'status': 'published',
                            'date_created': datetime.now().isoformat(),
                            'user_created': 'system'
                        },
                        {
                            'id': 'criteria_product_fit',
                            'name': 'product_fit',
                            'definition': 'Overall product-customer compatibility',
                            'weight': 0.2,
                            'guidelines': json.dumps({
                                'low': {'range': [0, 49], 'description': "Significant gaps between product's capabilities and the prospect's needs, or extensive customization required"},
                                'medium': {'range': [50, 79], 'description': 'Product addresses most key needs, some customization or additional features may be necessary'},
                                'high': {'range': [80, 100], 'description': "Product's features closely match the prospect's primary needs with minimal customization required"}
                            }),
                            'scoring_factors': json.dumps([
                                'Excellent feature match: 80-100',
                                'Good feature match: 60-79',
                                'Basic feature match: 40-59',
                                'Poor feature match: 0-39'
                            ]),
                            'org_id': 'rta',
                            'status': 'published',
                            'date_created': datetime.now().isoformat(),
                            'user_created': 'system'
                        },
                        {
                            'id': 'criteria_geographic_fit',
                            'name': 'geographic_market_fit',
                            'definition': 'Geographic alignment between customer location and product availability',
                            'weight': 0.2,
                            'guidelines': json.dumps({
                                'low': {'range': [0, 30], 'description': "Customer location is outside of the product's designated target markets"},
                                'medium': {'range': [31, 70], 'description': "Customer location is in regions adjacent to or with strong ties to the product's primary markets"},
                                'high': {'range': [71, 100], 'description': "Customer location is within the product's primary target markets"}
                            }),
                            'scoring_factors': json.dumps([
                                'Strong market presence: 80-100',
                                'Moderate presence: 60-79',
                                'Limited presence: 40-59',
                                'No market presence: 0-39'
                            ]),
                            'org_id': 'rta',
                            'status': 'published',
                            'date_created': datetime.now().isoformat(),
                            'user_created': 'system'
                        }
                    ]

                    for criteria in default_criteria:
                        cursor.execute("""
                            INSERT INTO gs_company_criteria
                            (id, name, definition, weight, guidelines, scoring_factors, org_id, status,
                             date_created, user_created)
                            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                        """, (
                            criteria['id'],
                            criteria['name'],
                            criteria['definition'],
                            criteria['weight'],
                            criteria['guidelines'],
                            criteria['scoring_factors'],
                            criteria['org_id'],
                            criteria['status'],
                            criteria['date_created'],
                            criteria['user_created']
                        ))

                    self.logger.debug(
                        f"Initialized {len(default_criteria)} default gs_company_criteria records")

                # Initialize default products if none exist
                cursor.execute(
                    "SELECT COUNT(*) FROM products WHERE org_id = 'rta'")
                product_count = cursor.fetchone()[0]

                if product_count == 0:
                    default_products = [
                        {
                            'product_id': 'prod-12345678-1234-1234-1234-123456789012',
                            'org_id': 'rta',
                            'org_name': 'RTA',
                            'project_code': 'FUSESELL',
                            'product_name': 'FuseSell AI Pro',
                            'short_description': 'AI-powered sales automation platform',
                            'long_description': 'Comprehensive sales automation solution with AI-driven lead scoring, email generation, and customer analysis capabilities',
                            'category': 'Sales Automation',
                            'subcategory': 'AI-Powered CRM',
                            'target_users': json.dumps(['Sales teams', 'Marketing professionals', 'Business development managers']),
                            'key_features': json.dumps(['AI lead scoring', 'Automated email generation', 'Customer data analysis', 'Pipeline management']),
                            'pain_points_solved': json.dumps(['Manual lead qualification', 'Inconsistent email outreach', 'Poor lead prioritization']),
                            'competitive_advantages': json.dumps(['Advanced AI algorithms', 'Local data processing', 'Customizable workflows']),
                            'localization': json.dumps(['North America', 'Europe', 'Asia-Pacific', 'Vietnam']),
                            'market_insights': json.dumps({'targetIndustries': ['Technology', 'SaaS', 'Professional Services'], 'idealCompanySize': '50-500 employees'}),
                            'status': 'active'
                        },
                        {
                            'product_id': 'prod-87654321-4321-4321-4321-210987654321',
                            'org_id': 'rta',
                            'org_name': 'RTA',
                            'project_code': 'FUSESELL',
                            'product_name': 'FuseSell Starter',
                            'short_description': 'Entry-level sales automation tool',
                            'long_description': 'Basic sales automation features for small teams getting started with sales technology',
                            'category': 'Sales Automation',
                            'subcategory': 'Basic CRM',
                            'target_users': json.dumps(['Small sales teams', 'Startups', 'Solo entrepreneurs']),
                            'key_features': json.dumps(['Contact management', 'Email templates', 'Basic reporting', 'Lead tracking']),
                            'pain_points_solved': json.dumps(['Manual contact management', 'Basic email automation needs']),
                            'competitive_advantages': json.dumps(['Easy to use', 'Affordable pricing', 'Quick setup']),
                            'localization': json.dumps(['Global']),
                            'market_insights': json.dumps({'targetIndustries': ['All industries'], 'idealCompanySize': '1-50 employees'}),
                            'status': 'active'
                        }
                    ]

                    for product in default_products:
                        cursor.execute("""
                            INSERT INTO products
                            (product_id, org_id, org_name, project_code, product_name, short_description,
                             long_description, category, subcategory, target_users, key_features,
                             pain_points_solved, competitive_advantages, localization, market_insights, status)
                            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                        """, (
                            product['product_id'], product['org_id'], product['org_name'], product['project_code'],
                            product['product_name'], product['short_description'], product['long_description'],
                            product['category'], product['subcategory'], product['target_users'], product['key_features'],
                            product['pain_points_solved'], product['competitive_advantages'], product['localization'],
                            product['market_insights'], product['status']
                        ))

                    self.logger.debug(
                        f"Initialized {len(default_products)} default products")

                # Initialize default team settings if none exist
                cursor.execute(
                    "SELECT COUNT(*) FROM team_settings WHERE org_id = 'rta'")
                team_count = cursor.fetchone()[0]

                if team_count == 0:
                    default_team_settings = {
                        'id': 'team_rta_default_settings',
                        'team_id': 'team_rta_default',
                        'org_id': 'rta',
                        'plan_id': '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',
                        'plan_name': 'FuseSell AI (v1.025)',
                        'project_code': 'FUSESELL',
                        'team_name': 'RTA Default Team',
                        'gs_team_organization': json.dumps({
                            'name': 'RTA',
                            'industry': 'Technology',
                            'website': 'https://rta.vn'
                        }),
                        'gs_team_rep': json.dumps([{
                            'name': 'Sales Team',
                            'email': 'sales@rta.vn',
                            'position': 'Sales Representative',
                            'is_primary': True
                        }]),
                        'gs_team_product': json.dumps([
                            {'product_id': 'prod-12345678-1234-1234-1234-123456789012',
                             'enabled': True, 'priority': 1},
                            {'product_id': 'prod-87654321-4321-4321-4321-210987654321',
                             'enabled': True, 'priority': 2}
                        ]),
                        'gs_team_schedule_time': json.dumps({
                            'business_hours_start': '08:00',
                            'business_hours_end': '20:00',
                            'default_delay_hours': 2,
                            'respect_weekends': True
                        }),
                        'gs_team_initial_outreach': json.dumps({
                            'default_tone': 'professional',
                            'approaches': [
                                'professional_direct',
                                'consultative',
                                'industry_expert',
                                'relationship_building'
                            ],
                            'subject_line_variations': 1
                        }),
                        'gs_team_follow_up': json.dumps({
                            'max_follow_ups': 5,
                            'default_interval_days': 3,
                            'strategies': [
                                'gentle_reminder',
                                'value_add',
                                'alternative_approach',
                                'final_attempt',
                                'graceful_farewell'
                            ]
                        }),
                        'gs_team_auto_interaction': json.dumps({
                            'enabled': True,
                            'handoff_threshold': 0.8,
                            'monitoring': 'standard'
                        }),
                        'gs_team_followup_schedule_time': json.dumps({
                            'timezone': 'Asia/Ho_Chi_Minh',
                            'window': 'business_hours'
                        }),
                        'gs_team_birthday_email': json.dumps({
                            'enabled': True,
                            'template': 'birthday_2025'
                        })
                    }

                    cursor.execute("""
                        INSERT INTO team_settings
                        (id, team_id, org_id, plan_id, plan_name, project_code, team_name,
                         gs_team_organization, gs_team_rep, gs_team_product,
                         gs_team_schedule_time, gs_team_initial_outreach, gs_team_follow_up,
                         gs_team_auto_interaction, gs_team_followup_schedule_time, gs_team_birthday_email)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        default_team_settings['id'],
                        default_team_settings['team_id'],
                        default_team_settings['org_id'],
                        default_team_settings['plan_id'],
                        default_team_settings['plan_name'],
                        default_team_settings['project_code'],
                        default_team_settings['team_name'],
                        default_team_settings['gs_team_organization'],
                        default_team_settings['gs_team_rep'],
                        default_team_settings['gs_team_product'],
                        default_team_settings['gs_team_schedule_time'],
                        default_team_settings['gs_team_initial_outreach'],
                        default_team_settings['gs_team_follow_up'],
                        default_team_settings['gs_team_auto_interaction'],
                        default_team_settings['gs_team_followup_schedule_time'],
                        default_team_settings['gs_team_birthday_email']
                    ))

                    self.logger.debug("Initialized default team settings")

                conn.commit()

        except Exception as e:
            self.logger.warning(f"Failed to initialize default data: {str(e)}")
            # Don't raise exception - this is not critical for basic functionality

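    # Illustrative sketch (not part of the class): how the seeding above might be
    # exercised on a fresh database. `dm` is a hypothetical, already-constructed
    # LocalDataManager (its constructor arguments are an assumption here); the
    # plan ID is the default seeded above, read back via get_llm_worker_plan below.
    #
    #     dm = LocalDataManager()          # assumed no-arg construction
    #     dm._initialize_default_data()    # no-op once the tables already hold data
    #     plan = dm.get_llm_worker_plan('569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832')
    #     print(plan['name'])              # -> 'FuseSell AI (v1.025)'
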
    def get_gs_company_criteria(self, org_id: str) -> List[Dict[str, Any]]:
        """
        Get scoring criteria from gs_company_criteria table (server schema).

        Args:
            org_id: Organization identifier

        Returns:
            List of scoring criteria from gs_company_criteria table
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT * FROM gs_company_criteria
                    WHERE org_id = ? AND status = 'published'
                    ORDER BY name
                """, (org_id,))

                criteria = []
                for row in cursor.fetchall():
                    criterion = dict(row)

                    # Parse JSON fields
                    if criterion['guidelines']:
                        try:
                            criterion['guidelines'] = json.loads(
                                criterion['guidelines'])
                        except json.JSONDecodeError:
                            pass

                    if criterion['scoring_factors']:
                        try:
                            criterion['scoring_factors'] = json.loads(
                                criterion['scoring_factors'])
                        except json.JSONDecodeError:
                            pass

                    criteria.append(criterion)

                return criteria

        except Exception as e:
            self.logger.error(f"Failed to get gs_company_criteria: {str(e)}")
            return []

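    # The five seeded criteria weights sum to 1.0 (0.15 + 0.15 + 0.30 + 0.20 +
    # 0.20), so a weighted composite stays on the same 0-100 scale as the
    # per-criterion scores. A minimal sketch of that arithmetic over the criteria
    # returned above (the lead-scoring stage's real aggregation lives elsewhere;
    # the `scores` dict is invented for illustration):
    #
    #     criteria = dm.get_gs_company_criteria('rta')
    #     scores = {'industry_fit': 85, 'company_size': 70, 'pain_points': 90,
    #               'product_fit': 80, 'geographic_market_fit': 60}
    #     composite = sum(c['weight'] * scores[c['name']] for c in criteria)
    #     # 0.15*85 + 0.15*70 + 0.30*90 + 0.20*80 + 0.20*60 = 78.25
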
    def get_llm_worker_plan(self, plan_id: str) -> Optional[Dict[str, Any]]:
        """
        Get llm_worker_plan data by plan ID.

        Args:
            plan_id: Plan identifier

        Returns:
            Plan data dictionary or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT * FROM llm_worker_plan WHERE id = ?", (plan_id,))
                row = cursor.fetchone()

                if row:
                    result = dict(row)

                    # Parse JSON fields
                    if result['executors']:
                        try:
                            result['executors'] = json.loads(
                                result['executors'])
                        except json.JSONDecodeError:
                            result['executors'] = []

                    if result['settings']:
                        try:
                            result['settings'] = json.loads(result['settings'])
                        except json.JSONDecodeError:
                            result['settings'] = {}

                    return result
                return None

        except Exception as e:
            self.logger.error(f"Failed to get llm_worker_plan: {str(e)}")
            return None

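    # Sketch: reading the executor chain back out of the plan returned above.
    # The 'executors' JSON is parsed by get_llm_worker_plan, so each entry is
    # already a dict with the nested 'llm_worker_executor_id' shape seeded in
    # _initialize_default_data (`dm` remains a hypothetical instance):
    #
    #     plan = dm.get_llm_worker_plan('569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832')
    #     for entry in plan['executors']:
    #         ex = entry['llm_worker_executor_id']
    #         print(ex['name'], '->', ex['display_name'])
    #     # gs_161_data_acquisition -> Data Acquisition ... gs_162_follow_up -> Follow Up
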
    # ===== TASK MANAGEMENT METHODS (Correct Schema Implementation) =====

    def save_task(
        self,
        task_id: str,
        plan_id: str,
        org_id: str,
        status: str = "running",
        messages: Optional[List[str]] = None,
        request_body: Optional[Dict[str, Any]] = None
    ) -> None:
        """
        Save sales process task (equivalent to llm_worker_task).

        Args:
            task_id: Unique task identifier (sales process ID)
            plan_id: Plan identifier
            org_id: Organization identifier
            status: Task status (running, completed, failed)
            messages: Optional messages for the task
            request_body: Initial request data for the sales process
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    INSERT OR REPLACE INTO llm_worker_task
                    (task_id, plan_id, org_id, status, current_runtime_index, messages, request_body)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (
                    task_id, plan_id, org_id, status, 0,
                    json.dumps(messages) if messages else None,
                    json.dumps(request_body) if request_body else None
                ))
                conn.commit()
                self.logger.debug(f"Saved task: {task_id}")

        except Exception as e:
            self.logger.error(f"Failed to save task: {str(e)}")
            raise

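    # Sketch of the intended task lifecycle using save_task above and
    # update_task_status below (`dm` is a hypothetical LocalDataManager; the
    # IDs and request body are invented):
    #
    #     dm.save_task(task_id='task-001',
    #                  plan_id='569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',
    #                  org_id='rta',
    #                  request_body={'customer_info': 'Acme Corp'})
    #     dm.update_task_status('task-001', 'running', runtime_index=2)
    #     dm.update_task_status('task-001', 'completed')
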
    def update_task_status(
        self,
        task_id: str,
        status: str,
        runtime_index: Optional[int] = None
    ) -> None:
        """
        Update task status and runtime index.

        Args:
            task_id: Task identifier
            status: New status
            runtime_index: Current runtime index (stage number)
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                if runtime_index is not None:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, current_runtime_index = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, runtime_index, task_id))
                else:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, task_id))

                conn.commit()
                self.logger.debug(
                    f"Updated task status: {task_id} -> {status}")

        except Exception as e:
            self.logger.error(f"Failed to update task status: {str(e)}")
            raise

    def save_operation(
        self,
        operation_id: str,
        task_id: str,
        executor_id: str,
        chain_order: int,
        chain_index: int,
        runtime_index: int,
        item_index: int,
        execution_status: str,
        input_data: Optional[Dict[str, Any]] = None,
        output_data: Optional[Dict[str, Any]] = None,
        payload: Optional[Dict[str, Any]] = None,
        user_messages: Optional[List[str]] = None
    ) -> None:
        """
        Save stage operation execution (equivalent to llm_worker_operation).

        Args:
            operation_id: Unique operation identifier
            task_id: Parent task identifier
            executor_id: Stage executor identifier (e.g., 'data_acquisition')
            chain_order: Order in the execution chain
            chain_index: Chain index
            runtime_index: Runtime index (stage number)
            item_index: Item index
            execution_status: Operation status (running, done, failed)
            input_data: Input data for the operation
            output_data: Output data from the operation
            payload: Additional payload data
            user_messages: User messages for the operation
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    INSERT OR REPLACE INTO llm_worker_operation
                    (operation_id, task_id, executor_id, chain_order, chain_index,
                     runtime_index, item_index, execution_status, input_data,
                     output_data, payload, user_messages)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    operation_id, task_id, executor_id, chain_order, chain_index,
                    runtime_index, item_index, execution_status,
                    json.dumps(input_data) if input_data else None,
                    json.dumps(output_data) if output_data else None,
                    json.dumps(payload) if payload else None,
                    json.dumps(user_messages) if user_messages else None
                ))
                conn.commit()
                self.logger.debug(f"Saved operation: {operation_id}")

        except Exception as e:
            self.logger.error(f"Failed to save operation: {str(e)}")
            raise

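    # Sketch: recording one stage execution against a task via save_operation
    # above. Because the INSERT uses OR REPLACE keyed on operation_id, calling
    # it again with the same operation_id overwrites the earlier row rather
    # than appending. IDs and payloads below are invented:
    #
    #     dm.save_operation(operation_id='task-001_lead_scoring_0',
    #                       task_id='task-001',
    #                       executor_id='gs_161_lead_scoring',
    #                       chain_order=2, chain_index=0,
    #                       runtime_index=2, item_index=0,
    #                       execution_status='done',
    #                       output_data={'score': 78.25})
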
    def get_task_operations(self, task_id: str) -> List[Dict[str, Any]]:
        """
        Get all operations for a specific task.

        Args:
            task_id: Task identifier

        Returns:
            List of operation records
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT * FROM llm_worker_operation
                    WHERE task_id = ?
                    ORDER BY runtime_index, chain_order
                """, (task_id,))

                columns = [description[0]
                           for description in cursor.description]
                operations = []

                for row in cursor.fetchall():
                    operation = dict(zip(columns, row))
                    # Parse JSON fields
                    for field in ['input_data', 'output_data', 'payload', 'user_messages']:
                        if operation[field]:
                            try:
                                operation[field] = json.loads(operation[field])
                            except json.JSONDecodeError:
                                pass
                    operations.append(operation)

                return operations

        except Exception as e:
            self.logger.error(f"Failed to get task operations: {str(e)}")
            return []

    def get_task_by_id(self, task_id: str) -> Optional[Dict[str, Any]]:
        """
        Get task by ID.

        Args:
            task_id: Task identifier

        Returns:
            Task record or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "SELECT * FROM llm_worker_task WHERE task_id = ?", (task_id,))

                row = cursor.fetchone()
                if row:
                    columns = [description[0]
                               for description in cursor.description]
                    task = dict(zip(columns, row))

                    # Parse JSON fields
                    for field in ['messages', 'request_body']:
                        if task[field]:
                            try:
                                task[field] = json.loads(task[field])
                            except json.JSONDecodeError:
                                pass

                    return task

                return None

        except Exception as e:
            self.logger.error(f"Failed to get task: {str(e)}")
            return None

    def list_tasks(
        self,
        org_id: Optional[str] = None,
        status: Optional[str] = None,
        limit: int = 50
    ) -> List[Dict[str, Any]]:
        """
        List tasks with optional filtering.

        Args:
            org_id: Optional organization filter
            status: Optional status filter
            limit: Maximum number of results

        Returns:
            List of task records
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                query = "SELECT * FROM llm_worker_task"
                params = []
                conditions = []

                if org_id:
                    conditions.append("org_id = ?")
                    params.append(org_id)

                if status:
                    conditions.append("status = ?")
                    params.append(status)

                if conditions:
                    query += " WHERE " + " AND ".join(conditions)

                query += " ORDER BY created_at DESC LIMIT ?"
                params.append(limit)

                cursor.execute(query, params)

                columns = [description[0]
                           for description in cursor.description]
                tasks = []

                for row in cursor.fetchall():
                    task = dict(zip(columns, row))
                    # Parse JSON fields
                    for field in ['messages', 'request_body']:
                        if task[field]:
                            try:
                                task[field] = json.loads(task[field])
                            except json.JSONDecodeError:
                                pass
                    tasks.append(task)

                return tasks

        except Exception as e:
            self.logger.error(f"Failed to list tasks: {str(e)}")
            return []

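    # Sketch: the two filters in list_tasks above combine with AND, so this
    # returns up to 10 running tasks for the 'rta' org, newest first
    # (ORDER BY created_at DESC):
    #
    #     running = dm.list_tasks(org_id='rta', status='running', limit=10)
    #     for t in running:
    #         print(t['task_id'], t['status'])
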
    # ===== SALES PROCESS QUERY METHODS =====

    def find_sales_processes_by_customer(self, customer_name: str) -> List[Dict[str, Any]]:
        """
        Find all sales processes for a specific customer.

        Args:
            customer_name: Customer name to search for

        Returns:
            List of task records matching the customer
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT t.*,
                           json_extract(t.request_body, '$.customer_info') as customer_info,
                           json_extract(t.request_body, '$.org_name') as org_name
                    FROM llm_worker_task t
                    WHERE json_extract(t.request_body, '$.customer_info') LIKE ?
                    ORDER BY t.created_at DESC
                """, (f'%{customer_name}%',))

                columns = [description[0]
                           for description in cursor.description]
                processes = []

                for row in cursor.fetchall():
                    process = dict(zip(columns, row))
                    # Parse JSON fields
                    for field in ['messages', 'request_body']:
                        if process[field]:
                            try:
                                process[field] = json.loads(process[field])
                            except json.JSONDecodeError:
                                pass
                    processes.append(process)

                return processes

        except Exception as e:
            self.logger.error(
                f"Failed to find sales processes by customer: {str(e)}")
            return []

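    # Sketch: the lookup above is a substring match (LIKE '%name%') over the
    # json_extract'ed customer_info field, so partial names match but may
    # over-match similar customers:
    #
    #     for proc in dm.find_sales_processes_by_customer('Acme'):
    #         print(proc['task_id'], proc['status'], proc['customer_info'])
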
    def get_sales_process_stages(self, task_id: str) -> List[Dict[str, Any]]:
        """
        Get all stage executions for a specific sales process.

        Args:
            task_id: Sales process (task) identifier

        Returns:
            List of operation records for the sales process
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT
                        operation_id,
                        executor_id,
                        runtime_index,
                        execution_status,
                        input_data,
                        output_data,
                        created_at,
                        updated_at
                    FROM llm_worker_operation
                    WHERE task_id = ?
                    ORDER BY runtime_index, chain_order
                """, (task_id,))

                columns = [description[0]
                           for description in cursor.description]
                stages = []

                for row in cursor.fetchall():
                    stage = dict(zip(columns, row))
                    # Parse JSON fields
                    for field in ['input_data', 'output_data']:
                        if stage[field]:
                            try:
                                stage[field] = json.loads(stage[field])
                            except json.JSONDecodeError:
                                pass

                    # Map executor_id to readable stage name
                    executor_mapping = {
                        'gs_161_data_acquisition': 'Data Acquisition',
                        'gs_161_data_preparation': 'Data Preparation',
                        'gs_161_lead_scoring': 'Lead Scoring',
                        'gs_162_initial_outreach': 'Initial Outreach',
                        'gs_162_follow_up': 'Follow-up'
                    }
                    stage['stage_name'] = executor_mapping.get(
                        stage['executor_id'], stage['executor_id'])

                    stages.append(stage)

                return stages

        except Exception as e:
            self.logger.error(f"Failed to get sales process stages: {str(e)}")
            return []

    def get_sales_process_summary(self, task_id: str) -> Optional[Dict[str, Any]]:
        """
        Get a complete summary of a sales process including task info and all stages.

        Args:
            task_id: Sales process (task) identifier

        Returns:
            Complete sales process summary or None if not found
        """
        try:
            # Get task info
            task = self.get_task_by_id(task_id)
            if not task:
                return None

            # Get all stage operations
            stages = self.get_sales_process_stages(task_id)

            # Get related data
            lead_scores = []
            email_drafts = []

            try:
                with sqlite3.connect(self.db_path) as conn:
                    cursor = conn.cursor()

                    # Get lead scores
                    cursor.execute("""
                        SELECT product_id, score, criteria_breakdown, created_at
                        FROM lead_scores
                        WHERE execution_id = ?
                    """, (task_id,))

                    for row in cursor.fetchall():
                        lead_scores.append({
                            'product_id': row[0],
                            'score': row[1],
                            'criteria_breakdown': json.loads(row[2]) if row[2] else {},
                            'created_at': row[3]
                        })

                    # Get email drafts
                    cursor.execute("""
                        SELECT draft_id, subject, content, draft_type, priority_order, created_at
                        FROM email_drafts
                        WHERE execution_id = ?
                    """, (task_id,))

                    for row in cursor.fetchall():
                        email_drafts.append({
                            'draft_id': row[0],
                            'subject': row[1],
                            # Truncate content
                            'content': row[2][:200] + '...' if len(row[2]) > 200 else row[2],
                            'draft_type': row[3],
                            'priority_order': row[4],
                            'created_at': row[5]
                        })

            except Exception as e:
                self.logger.warning(
                    f"Failed to get related data for task {task_id}: {str(e)}")

            return {
                'task_info': task,
                'stages': stages,
                'lead_scores': lead_scores,
                'email_drafts': email_drafts,
                'summary': {
                    'total_stages': len(stages),
                    'completed_stages': len([s for s in stages if s['execution_status'] == 'done']),
                    'failed_stages': len([s for s in stages if s['execution_status'] == 'failed']),
                    'total_lead_scores': len(lead_scores),
                    'total_email_drafts': len(email_drafts)
                }
            }

        except Exception as e:
            self.logger.error(f"Failed to get sales process summary: {str(e)}")
            return None

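    # Sketch: turning the dict returned above into a one-line progress report.
    # The key names match the 'summary' structure built by
    # get_sales_process_summary; the task ID is invented:
    #
    #     summary = dm.get_sales_process_summary('task-001')
    #     if summary:
    #         s = summary['summary']
    #         print(f"{s['completed_stages']}/{s['total_stages']} stages done, "
    #               f"{s['failed_stages']} failed, "
    #               f"{s['total_email_drafts']} email drafts")
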
    # ===== CUSTOMER DATA PERSISTENCE METHODS =====

    def update_customer_from_profile(self, customer_id: str, profile_data: Dict[str, Any]) -> None:
        """
        Update customer record with profile data from data preparation stage.

        Args:
            customer_id: Customer identifier
            profile_data: Structured profile data from data preparation
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Extract data from profile structure
                company_info = profile_data.get('companyInfo', {})
                contact_info = profile_data.get('primaryContact', {})

                cursor.execute("""
                    UPDATE customers
                    SET company_name = ?, website = ?, industry = ?,
                        contact_name = ?, contact_email = ?, contact_phone = ?,
                        address = ?, profile_data = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE customer_id = ?
                """, (
                    company_info.get('name', ''),
                    company_info.get('website', ''),
                    company_info.get('industry', ''),
                    contact_info.get('name', ''),
                    contact_info.get('email', ''),
                    contact_info.get('phone', ''),
                    company_info.get('address', ''),
                    json.dumps(profile_data),
                    customer_id
                ))

                conn.commit()
                self.logger.debug(f"Updated customer profile: {customer_id}")

        except Exception as e:
            self.logger.error(f"Failed to update customer profile: {str(e)}")
            raise

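    # Sketch: the minimum profile_data shape update_customer_from_profile above
    # reads -- only the 'companyInfo' and 'primaryContact' keys are extracted
    # into columns; the full dict is also stored verbatim in the profile_data
    # JSON column. Values below are invented:
    #
    #     dm.update_customer_from_profile('cust-001', {
    #         'companyInfo': {'name': 'Acme Corp', 'website': 'https://acme.example',
    #                         'industry': 'Manufacturing', 'address': '1 Main St'},
    #         'primaryContact': {'name': 'Jane Doe', 'email': 'jane@acme.example',
    #                            'phone': '+1-555-0100'},
    #     })
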
    def get_customer_task(self, task_id: str, customer_id: str) -> Optional[Dict[str, Any]]:
        """
        Get customer task data by task_id and customer_id.

        Args:
            task_id: Task identifier
            customer_id: Customer identifier

        Returns:
            Customer task data or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                cursor.execute("""
                    SELECT * FROM gs_customer_llmtask
                    WHERE task_id = ? AND customer_id = ?
                """, (task_id, customer_id))

                row = cursor.fetchone()
                if row:
                    return dict(row)
                return None

        except Exception as e:
            self.logger.error(f"Failed to get customer task: {str(e)}")
            return None

    # ===== SCHEMA MIGRATION METHODS =====

    def backup_existing_schema(self) -> str:
        """
        Create backup of existing execution data before migration.

        Returns:
            Backup file path
        """
        try:
            import shutil
            from datetime import datetime

            backup_path = f"{self.db_path}.backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            shutil.copy2(self.db_path, backup_path)

            self.logger.info(f"Database backup created: {backup_path}")
            return backup_path

        except Exception as e:
            self.logger.error(f"Failed to create backup: {str(e)}")
            raise

    def migrate_executions_to_tasks(self) -> int:
        """
        Migrate existing executions table data to new llm_worker_task table format.

        Returns:
            Number of records migrated
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Check if old executions table exists
                cursor.execute("""
                    SELECT name FROM sqlite_master
                    WHERE type='table' AND name='executions'
                """)
                if not cursor.fetchone():
                    self.logger.info(
                        "No executions table found, skipping migration")
                    return 0

                # Get existing executions
                cursor.execute("""
                    SELECT execution_id, org_id, org_name, status, started_at,
                           completed_at, config_json
                    FROM executions
                """)
                executions = cursor.fetchall()

                migrated_count = 0
                for execution in executions:
                    execution_id, org_id, org_name, status, started_at, completed_at, config_json = execution

                    # Parse config_json to extract request_body
                    request_body = {}
                    if config_json:
                        try:
                            config_data = json.loads(config_json)
                            request_body = {
                                'org_id': org_id,
                                'org_name': org_name,
                                'customer_info': config_data.get('customer_name', ''),
                                'language': config_data.get('language', 'english'),
                                'input_website': config_data.get('customer_website', ''),
                                'execution_id': execution_id
                            }
                        except json.JSONDecodeError:
                            request_body = {
                                'org_id': org_id, 'org_name': org_name}

                    # Map execution status to task status
                    task_status = 'completed' if status == 'completed' else 'failed' if status == 'failed' else 'running'

                    # Insert into llm_worker_task table
                    cursor.execute("""
                        INSERT OR REPLACE INTO llm_worker_task
                        (task_id, plan_id, org_id, status, current_runtime_index,
                         messages, request_body, created_at, updated_at)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        execution_id,
                        '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',  # Default plan ID
                        org_id,
                        task_status,
                        0,  # Default runtime index
                        json.dumps([]),  # Empty messages
                        json.dumps(request_body),
                        started_at,
                        completed_at or started_at
                    ))

                    migrated_count += 1

                conn.commit()
                self.logger.info(
                    f"Migrated {migrated_count} executions to llm_worker_task table")
                return migrated_count

        except Exception as e:
            self.logger.error(
                f"Failed to migrate executions to tasks: {str(e)}")
            raise

    def migrate_stage_results_to_operations(self) -> int:
        """
        Migrate existing stage_results table data to new llm_worker_operation table format.

        Returns:
            Number of records migrated
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                # Check if old stage_results table exists
                cursor.execute("""
                    SELECT name FROM sqlite_master
                    WHERE type='table' AND name='stage_results'
                """)
                if not cursor.fetchone():
                    self.logger.info(
                        "No stage_results table found, skipping migration")
                    return 0

                # Get existing stage results
                cursor.execute("""
                    SELECT id, execution_id, stage_name, status, input_data,
                           output_data, started_at, completed_at, error_message
                    FROM stage_results
                    ORDER BY execution_id, started_at
                """)
                stage_results = cursor.fetchall()

                migrated_count = 0
                current_execution = None
                chain_index = 0

                for stage_result in stage_results:
                    (stage_id, execution_id, stage_name, status, input_data,
                     output_data, started_at, completed_at, error_message) = stage_result

                    # Reset chain_index for new execution
                    if current_execution != execution_id:
                        current_execution = execution_id
                        chain_index = 0

                    # Parse JSON data
                    input_json = {}
                    output_json = {}

                    if input_data:
                        try:
                            input_json = json.loads(input_data) if isinstance(
                                input_data, str) else input_data
                        except (json.JSONDecodeError, TypeError):
                            input_json = {'raw_input': str(input_data)}

                    if output_data:
                        try:
                            output_json = json.loads(output_data) if isinstance(
                                output_data, str) else output_data
                        except (json.JSONDecodeError, TypeError):
                            output_json = {'raw_output': str(output_data)}

                    # Add error message to output if failed
                    if status == 'failed' and error_message:
                        output_json['error'] = error_message

                    # Map stage status to execution status
                    execution_status = 'done' if status == 'success' else 'failed' if status == 'failed' else 'running'

                    # Generate operation ID
                    operation_id = f"{execution_id}_{stage_name}_{chain_index}"

                    # Insert into llm_worker_operation table
                    cursor.execute("""
                        INSERT OR REPLACE INTO llm_worker_operation
                        (operation_id, task_id, executor_name, runtime_index,
                         chain_index, execution_status, input_data, output_data,
                         date_created, date_updated)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """, (
                        operation_id,
                        execution_id,
                        stage_name,
                        0,  # Default runtime index
                        chain_index,
                        execution_status,
                        json.dumps(input_json),
                        json.dumps(output_json),
                        started_at,
                        completed_at or started_at
                    ))

                    chain_index += 1
                    migrated_count += 1

                conn.commit()
                self.logger.info(
                    f"Migrated {migrated_count} stage results to llm_worker_operation table")
                return migrated_count

        except Exception as e:
            self.logger.error(
                f"Failed to migrate stage results to operations: {str(e)}")
            raise

    def validate_migration(self) -> bool:
        """
        Validate that migration was successful by comparing data integrity.

        Returns:
            True if migration is valid, False otherwise
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                validation_errors = []

                # Check if new tables exist (the tables the migration writes to)
                required_tables = ['llm_worker_task', 'llm_worker_operation']
                for table in required_tables:
                    cursor.execute("""
                        SELECT name FROM sqlite_master
                        WHERE type='table' AND name=?
                    """, (table,))
                    if not cursor.fetchone():
                        validation_errors.append(
                            f"Required table '{table}' not found")

                # Check if old tables still exist (for rollback capability)
                legacy_tables = ['executions', 'stage_results']
                for table in legacy_tables:
                    cursor.execute("""
                        SELECT name FROM sqlite_master
                        WHERE type='table' AND name=?
                    """, (table,))
                    if not cursor.fetchone():
                        validation_errors.append(
                            f"Legacy table '{table}' not found for rollback")

                # Validate data counts match
                cursor.execute("SELECT COUNT(*) FROM executions")
                old_execution_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM llm_worker_task")
                new_task_count = cursor.fetchone()[0]

                if old_execution_count != new_task_count:
                    validation_errors.append(
                        f"Execution count mismatch: {old_execution_count} executions vs {new_task_count} tasks"
                    )

                cursor.execute("SELECT COUNT(*) FROM stage_results")
                old_stage_count = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM llm_worker_operation")
                new_operation_count = cursor.fetchone()[0]

                if old_stage_count != new_operation_count:
                    validation_errors.append(
                        f"Stage count mismatch: {old_stage_count} stage_results vs {new_operation_count} operations"
                    )

                # Validate JSON data integrity
                cursor.execute(
                    "SELECT operation_id, input_data, output_data FROM llm_worker_operation LIMIT 10")
                for operation_id, input_data, output_data in cursor.fetchall():
                    try:
                        if input_data:
                            json.loads(input_data)
                        if output_data:
                            json.loads(output_data)
                    except json.JSONDecodeError as e:
                        validation_errors.append(
                            f"Invalid JSON in operation {operation_id}: {e}")

                # Validate foreign key relationships
                cursor.execute("""
                    SELECT COUNT(*) FROM llm_worker_operation o
                    LEFT JOIN llm_worker_task t ON o.task_id = t.task_id
                    WHERE t.task_id IS NULL
                """)
                orphaned_operations = cursor.fetchone()[0]
                if orphaned_operations > 0:
                    validation_errors.append(
                        f"Found {orphaned_operations} orphaned operations")

                if validation_errors:
                    self.logger.error(
                        f"Migration validation failed: {validation_errors}")
                    return False

                self.logger.info("Migration validation successful")
                return True

        except Exception as e:
            self.logger.error(f"Migration validation error: {str(e)}")
            return False

    def rollback_migration(self, backup_path: Optional[str] = None) -> bool:
        """
        Rollback migration by restoring from backup.

        Args:
            backup_path: Path to backup file, if None will find latest backup

        Returns:
            True if rollback successful, False otherwise
        """
        try:
            import shutil
            import glob

            # Find backup file if not provided
            if not backup_path:
                backup_pattern = f"{self.db_path}.backup_*"
                backup_files = glob.glob(backup_pattern)
                if not backup_files:
                    self.logger.error("No backup files found for rollback")
                    return False
                backup_path = max(backup_files)  # Get most recent backup

            if not os.path.exists(backup_path):
                self.logger.error(f"Backup file not found: {backup_path}")
                return False

            # Create a backup of current state before rollback
            current_backup = f"{self.db_path}.pre_rollback_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            shutil.copy2(self.db_path, current_backup)

            # Restore from backup
            shutil.copy2(backup_path, self.db_path)

            self.logger.info(
                f"Migration rolled back from backup: {backup_path}")
            self.logger.info(f"Current state backed up to: {current_backup}")
            return True

        except Exception as e:
            self.logger.error(f"Rollback failed: {str(e)}")
            return False

    def execute_full_migration(self) -> bool:
        """
        Execute complete migration process with error handling and rollback.

        Returns:
            True if migration successful, False otherwise
        """
        backup_path = None
        try:
            self.logger.info("Starting schema migration process")

            # Step 1: Create backup
            backup_path = self.backup_existing_schema()

            # Step 2: Migrate executions to tasks
            task_count = self.migrate_executions_to_tasks()

            # Step 3: Migrate stage results to operations
            operation_count = self.migrate_stage_results_to_operations()

            # Step 4: Validate migration
            if not self.validate_migration():
                self.logger.error("Migration validation failed, rolling back")
                self.rollback_migration(backup_path)
                return False

            self.logger.info(
                f"Migration completed successfully: {task_count} tasks, {operation_count} operations")
            return True

        except Exception as e:
            self.logger.error(f"Migration failed: {str(e)}")
            if backup_path:
                self.logger.info("Attempting rollback...")
                self.rollback_migration(backup_path)
            return False

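    # Sketch: the end-to-end migration call. execute_full_migration above backs
    # up the SQLite file, migrates both legacy tables, validates the result, and
    # restores from the backup automatically on failure:
    #
    #     if dm.execute_full_migration():
    #         print('migrated to llm_worker_task / llm_worker_operation')
    #     else:
    #         print('migration failed; database restored from backup')
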
    # ===== SERVER-COMPATIBLE TASK MANAGEMENT METHODS =====

    def create_task(
        self,
        task_id: str,
        plan_id: str,
        org_id: str,
        request_body: Dict[str, Any],
        status: str = "running"
    ) -> None:
        """
        Create task record with proper server schema (llm_worker_task).

        Args:
            task_id: Unique task identifier (sales process ID)
            plan_id: Plan identifier
            org_id: Organization identifier
            request_body: Initial request data for the sales process
            status: Task status (running, completed, failed)
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    INSERT OR REPLACE INTO llm_worker_task
                    (task_id, plan_id, org_id, status, current_runtime_index,
                     messages, request_body, created_at, updated_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                """, (
                    task_id,
                    plan_id,
                    org_id,
                    status,
                    0,  # Initial runtime index
                    json.dumps([]),  # Empty messages initially
                    json.dumps(request_body)
                ))
                conn.commit()
                self.logger.debug(f"Created task: {task_id}")

        except Exception as e:
            self.logger.error(f"Failed to create task: {str(e)}")
            raise

    # NOTE: update_task_status is defined twice in this class; Python keeps only
    # this later definition, which shadows the one in the task management
    # section above.
    def update_task_status(
        self,
        task_id: str,
        status: str,
        runtime_index: Optional[int] = None
    ) -> None:
        """
        Update task status and runtime_index with proper server schema.

        Args:
            task_id: Task identifier
            status: New task status (running, completed, failed)
            runtime_index: Optional runtime index to update
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                if runtime_index is not None:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, current_runtime_index = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, runtime_index, task_id))
                else:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, task_id))

                conn.commit()
                self.logger.debug(
                    f"Updated task {task_id}: status={status}, runtime_index={runtime_index}")

        except Exception as e:
            self.logger.error(f"Failed to update task status: {str(e)}")
            raise

    def get_task(self, task_id: str) -> Optional[Dict[str, Any]]:
        """
        Get task record with all related data.

        Args:
            task_id: Task identifier

        Returns:
            Task data or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                cursor.execute("""
                    SELECT * FROM llm_worker_task WHERE task_id = ?
                """, (task_id,))

                row = cursor.fetchone()
                if row:
                    task_data = dict(row)

                    # Parse JSON fields
                    if task_data['messages']:
                        try:
                            task_data['messages'] = json.loads(
                                task_data['messages'])
                        except json.JSONDecodeError:
                            task_data['messages'] = []

                    if task_data['request_body']:
                        try:
                            task_data['request_body'] = json.loads(
                                task_data['request_body'])
                        except json.JSONDecodeError:
                            task_data['request_body'] = {}

                    return task_data

                return None

        except Exception as e:
            self.logger.error(f"Failed to get task: {str(e)}")
            return None

    def add_task_message(self, task_id: str, message: str) -> None:
        """
        Add message to task messages array.

        Args:
            task_id: Task identifier
            message: Message to add
        """
        try:
            task = self.get_task(task_id)
            if not task:
                self.logger.warning(f"Task not found: {task_id}")
                return

            messages = task.get('messages', [])
            messages.append({
                'message': message,
                'timestamp': datetime.now().isoformat()
            })

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    UPDATE llm_worker_task
                    SET messages = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE task_id = ?
                """, (json.dumps(messages), task_id))
                conn.commit()

        except Exception as e:
            self.logger.error(f"Failed to add task message: {str(e)}")
            raise

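    # Each appended entry is wrapped with an ISO-8601 timestamp before being
    # re-serialized, so the stored column looks like (illustrative value):
    #
    #   [{"message": "stage started", "timestamp": "2025-01-01T12:00:00"}]
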
    # ===== SERVER-COMPATIBLE OPERATION MANAGEMENT METHODS =====

    def create_operation(
        self,
        task_id: str,
        executor_name: str,
        runtime_index: int,
        chain_index: int,
        input_data: Dict[str, Any]
    ) -> str:
        """
        Create operation record with input_data (llm_worker_operation).

        Args:
            task_id: Parent task identifier
            executor_name: Stage name (data_acquisition, lead_scoring, etc.)
            runtime_index: Execution attempt number
            chain_index: Position in execution chain
            input_data: Stage-specific input data

        Returns:
            Generated operation_id
        """
        try:
            # Generate unique operation ID
            operation_id = f"{task_id}_{executor_name}_{runtime_index}_{chain_index}"

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    INSERT OR REPLACE INTO llm_worker_operation
                    (operation_id, task_id, executor_name, runtime_index,
                     chain_index, execution_status, input_data, output_data,
                     date_created, date_updated)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                """, (
                    operation_id,
                    task_id,
                    executor_name,
                    runtime_index,
                    chain_index,
                    'running',  # Initial status
                    json.dumps(input_data),
                    json.dumps({})  # Empty output initially
                ))
                conn.commit()
                self.logger.debug(f"Created operation: {operation_id}")
                return operation_id

        except Exception as e:
            self.logger.error(f"Failed to create operation: {str(e)}")
            raise

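    # Because the operation_id is composed deterministically from its parts,
    # INSERT OR REPLACE makes re-running the same attempt idempotent.
    # Illustrative composition (hypothetical task ID):
    #
    #   task_id="task-001", executor_name="lead_scoring",
    #   runtime_index=1, chain_index=2
    #   -> operation_id == "task-001_lead_scoring_1_2"
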
    def update_operation_status(
        self,
        operation_id: str,
        execution_status: str,
        output_data: Dict[str, Any]
    ) -> None:
        """
        Update operation execution_status and output_data.

        Args:
            operation_id: Operation identifier
            execution_status: New status (done, failed, running)
            output_data: Stage-specific output data
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    UPDATE llm_worker_operation
                    SET execution_status = ?, output_data = ?, date_updated = CURRENT_TIMESTAMP
                    WHERE operation_id = ?
                """, (execution_status, json.dumps(output_data), operation_id))

                conn.commit()
                self.logger.debug(
                    f"Updated operation {operation_id}: status={execution_status}")

        except Exception as e:
            self.logger.error(f"Failed to update operation status: {str(e)}")
            raise

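    # Sketch of closing out an operation (hypothetical ID and payload):
    #
    #   dm.update_operation_status(
    #       "task-001_lead_scoring_1_2", "done", {"score": 87})
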
    def get_operations_by_task(self, task_id: str) -> List[Dict[str, Any]]:
        """
        Get all operations for a specific task.

        Args:
            task_id: Task identifier

        Returns:
            List of operation records
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                cursor.execute("""
                    SELECT * FROM llm_worker_operation
                    WHERE task_id = ?
                    ORDER BY runtime_index, chain_index
                """, (task_id,))

                operations = []
                for row in cursor.fetchall():
                    operation = dict(row)

                    # Parse JSON fields
                    if operation['input_data']:
                        try:
                            operation['input_data'] = json.loads(
                                operation['input_data'])
                        except json.JSONDecodeError:
                            operation['input_data'] = {}

                    if operation['output_data']:
                        try:
                            operation['output_data'] = json.loads(
                                operation['output_data'])
                        except json.JSONDecodeError:
                            operation['output_data'] = {}

                    operations.append(operation)

                return operations

        except Exception as e:
            self.logger.error(f"Failed to get operations by task: {str(e)}")
            return []

    def get_operation(self, operation_id: str) -> Optional[Dict[str, Any]]:
        """
        Get individual operation record.

        Args:
            operation_id: Operation identifier

        Returns:
            Operation data or None if not found
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                cursor.execute("""
                    SELECT * FROM llm_worker_operation WHERE operation_id = ?
                """, (operation_id,))

                row = cursor.fetchone()
                if row:
                    operation = dict(row)

                    # Parse JSON fields
                    if operation['input_data']:
                        try:
                            operation['input_data'] = json.loads(
                                operation['input_data'])
                        except json.JSONDecodeError:
                            operation['input_data'] = {}

                    if operation['output_data']:
                        try:
                            operation['output_data'] = json.loads(
                                operation['output_data'])
                        except json.JSONDecodeError:
                            operation['output_data'] = {}

                    return operation

                return None

        except Exception as e:
            self.logger.error(f"Failed to get operation: {str(e)}")
            return None

    def get_operations_by_executor(
        self,
        executor_name: str,
        org_id: Optional[str] = None,
        execution_status: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """
        Get operations by executor name (stage-specific queries).

        Args:
            executor_name: Stage name to filter by
            org_id: Optional organization filter
            execution_status: Optional status filter

        Returns:
            List of matching operations
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                query = """
                    SELECT o.* FROM llm_worker_operation o
                    JOIN llm_worker_task t ON o.task_id = t.task_id
                    WHERE o.executor_name = ?
                """
                params = [executor_name]

                if org_id:
                    query += " AND t.org_id = ?"
                    params.append(org_id)

                if execution_status:
                    query += " AND o.execution_status = ?"
                    params.append(execution_status)

                query += " ORDER BY o.date_created DESC"

                cursor.execute(query, params)

                operations = []
                for row in cursor.fetchall():
                    operation = dict(row)

                    # Parse JSON fields
                    if operation['input_data']:
                        try:
                            operation['input_data'] = json.loads(
                                operation['input_data'])
                        except json.JSONDecodeError:
                            operation['input_data'] = {}

                    if operation['output_data']:
                        try:
                            operation['output_data'] = json.loads(
                                operation['output_data'])
                        except json.JSONDecodeError:
                            operation['output_data'] = {}

                    operations.append(operation)

                return operations

        except Exception as e:
            self.logger.error(
                f"Failed to get operations by executor: {str(e)}")
            return []

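    # Filter sketch (hypothetical org ID): the optional arguments compose into
    # the WHERE clause, e.g. all failed lead_scoring runs for one org,
    # newest first:
    #
    #   ops = dm.get_operations_by_executor(
    #       "lead_scoring", org_id="org-001", execution_status="failed")
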
    # ===== SERVER-COMPATIBLE QUERY METHODS =====

    def get_task_with_operations(self, task_id: str) -> Optional[Dict[str, Any]]:
        """
        Get complete task details with all related operations.

        Args:
            task_id: Task identifier

        Returns:
            Complete task data with operations or None if not found
        """
        try:
            task = self.get_task(task_id)
            if not task:
                return None

            operations = self.get_operations_by_task(task_id)

            # Add operations to task data
            task['operations'] = operations

            # Add summary statistics
            task['summary'] = {
                'total_operations': len(operations),
                'completed_operations': len([op for op in operations if op['execution_status'] == 'done']),
                'failed_operations': len([op for op in operations if op['execution_status'] == 'failed']),
                'running_operations': len([op for op in operations if op['execution_status'] == 'running'])
            }

            return task

        except Exception as e:
            self.logger.error(f"Failed to get task with operations: {str(e)}")
            return None

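    # The attached summary makes progress checks a single lookup
    # (hypothetical task ID):
    #
    #   task = dm.get_task_with_operations("task-001")
    #   if task:
    #       pct = task['summary']['completed_operations'] / max(
    #           task['summary']['total_operations'], 1)
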
    def get_execution_timeline(self, task_id: str, runtime_index: Optional[int] = None) -> List[Dict[str, Any]]:
        """
        Get chronological operation tracking for specific execution attempt.

        Args:
            task_id: Task identifier
            runtime_index: Optional specific runtime index

        Returns:
            List of operations in chronological order
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                query = """
                    SELECT * FROM llm_worker_operation
                    WHERE task_id = ?
                """
                params = [task_id]

                if runtime_index is not None:
                    query += " AND runtime_index = ?"
                    params.append(runtime_index)

                query += " ORDER BY runtime_index, chain_index, date_created"

                cursor.execute(query, params)

                timeline = []
                for row in cursor.fetchall():
                    operation = dict(row)

                    # Parse JSON fields
                    if operation['input_data']:
                        try:
                            operation['input_data'] = json.loads(
                                operation['input_data'])
                        except json.JSONDecodeError:
                            operation['input_data'] = {}

                    if operation['output_data']:
                        try:
                            operation['output_data'] = json.loads(
                                operation['output_data'])
                        except json.JSONDecodeError:
                            operation['output_data'] = {}

                    timeline.append(operation)

                return timeline

        except Exception as e:
            self.logger.error(f"Failed to get execution timeline: {str(e)}")
            return []

    def get_stage_performance_metrics(
        self,
        executor_name: str,
        org_id: Optional[str] = None,
        date_range: Optional[tuple] = None
    ) -> Dict[str, Any]:
        """
        Get performance analysis for specific stage.

        Args:
            executor_name: Stage name
            org_id: Optional organization filter
            date_range: Optional (start_date, end_date) tuple

        Returns:
            Performance metrics dictionary
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                query = """
                    SELECT
                        o.execution_status,
                        COUNT(*) as count,
                        AVG(julianday(o.date_updated) - julianday(o.date_created)) * 24 * 60 as avg_duration_minutes
                    FROM llm_worker_operation o
                    JOIN llm_worker_task t ON o.task_id = t.task_id
                    WHERE o.executor_name = ?
                """
                params = [executor_name]

                if org_id:
                    query += " AND t.org_id = ?"
                    params.append(org_id)

                if date_range:
                    query += " AND o.date_created BETWEEN ? AND ?"
                    params.extend(date_range)

                query += " GROUP BY o.execution_status"

                cursor.execute(query, params)

                metrics = {
                    'executor_name': executor_name,
                    'org_id': org_id,
                    'total_executions': 0,
                    'success_rate': 0.0,
                    'failure_rate': 0.0,
                    'avg_duration_minutes': 0.0,
                    'status_breakdown': {}
                }

                total_count = 0
                success_count = 0
                total_duration = 0.0

                for row in cursor.fetchall():
                    status, count, avg_duration = row
                    total_count += count
                    metrics['status_breakdown'][status] = {
                        'count': count,
                        'avg_duration_minutes': avg_duration or 0.0
                    }

                    if status == 'done':
                        success_count = count

                    if avg_duration:
                        total_duration += avg_duration * count

                if total_count > 0:
                    metrics['total_executions'] = total_count
                    metrics['success_rate'] = (
                        success_count / total_count) * 100
                    metrics['failure_rate'] = (
                        (total_count - success_count) / total_count) * 100
                    metrics['avg_duration_minutes'] = total_duration / \
                        total_count

                return metrics

        except Exception as e:
            self.logger.error(
                f"Failed to get stage performance metrics: {str(e)}")
            return {'error': str(e)}

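    # Worked example of the duration math above: julianday() measures days,
    # so the * 24 * 60 factor converts to minutes. An operation created at
    # 10:00:00 and updated at 10:03:30 spans 210 s = 210/86400 days
    # ~= 0.00243 days, and 0.00243 * 24 * 60 ~= 3.5 minutes.
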
    def find_failed_operations(
        self,
        org_id: Optional[str] = None,
        executor_name: Optional[str] = None,
        limit: int = 50
    ) -> List[Dict[str, Any]]:
        """
        Find failed operations for debugging.

        Args:
            org_id: Optional organization filter
            executor_name: Optional stage filter
            limit: Maximum number of results

        Returns:
            List of failed operations with error details
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.cursor()

                query = """
                    SELECT o.*, t.org_id FROM llm_worker_operation o
                    JOIN llm_worker_task t ON o.task_id = t.task_id
                    WHERE o.execution_status = 'failed'
                """
                params = []

                if org_id:
                    query += " AND t.org_id = ?"
                    params.append(org_id)

                if executor_name:
                    query += " AND o.executor_name = ?"
                    params.append(executor_name)

                query += " ORDER BY o.date_created DESC LIMIT ?"
                params.append(limit)

                cursor.execute(query, params)

                failed_operations = []
                for row in cursor.fetchall():
                    operation = dict(row)

                    # Parse output_data to extract error information
                    if operation['output_data']:
                        try:
                            output_data = json.loads(operation['output_data'])
                            operation['output_data'] = output_data
                            operation['error_summary'] = output_data.get(
                                'error', 'Unknown error')
                        except json.JSONDecodeError:
                            operation['error_summary'] = 'JSON parse error in output_data'
                    else:
                        operation['error_summary'] = 'No error details available'

                    failed_operations.append(operation)

                return failed_operations

        except Exception as e:
            self.logger.error(f"Failed to find failed operations: {str(e)}")
            return []

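    # Debugging sketch (hypothetical stage name): surface the ten most recent
    # failures with their extracted error summaries:
    #
    #   for op in dm.find_failed_operations(
    #           executor_name="data_acquisition", limit=10):
    #       print(op['operation_id'], op['error_summary'])
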
    def create_task(
        self,
        task_id: str,
        plan_id: str,
        org_id: str,
        request_body: Dict[str, Any],
        status: str = "running"
    ) -> str:
        """
        Create a new task record in llm_worker_task table.

        Args:
            task_id: Unique task identifier
            plan_id: Plan identifier
            org_id: Organization identifier
            request_body: Task request body data
            status: Initial task status

        Returns:
            Task ID
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute("""
                    INSERT INTO llm_worker_task
                    (task_id, plan_id, org_id, status, current_runtime_index, messages, request_body)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (
                    task_id,
                    plan_id,
                    org_id,
                    status,
                    0,  # initial runtime_index
                    json.dumps([]),  # empty messages initially
                    json.dumps(request_body)
                ))

                conn.commit()
                self.logger.debug(f"Created task: {task_id}")
                return task_id

        except Exception as e:
            self.logger.error(f"Failed to create task: {str(e)}")
            raise

    def create_operation(
        self,
        task_id: str,
        executor_name: str,
        runtime_index: int,
        chain_index: int,
        input_data: Dict[str, Any]
    ) -> str:
        """
        Create a new operation record in llm_worker_operation table.

        Args:
            task_id: Task identifier
            executor_name: Name of the executor/stage
            runtime_index: Runtime execution index
            chain_index: Chain execution index
            input_data: Operation input data

        Returns:
            Operation ID
        """
        try:
            operation_id = f"{task_id}_{executor_name}_{runtime_index}_{chain_index}"

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                cursor.execute("""
                    INSERT INTO llm_worker_operation
                    (operation_id, task_id, executor_name, runtime_index, chain_index,
                     execution_status, input_data)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (
                    operation_id,
                    task_id,
                    executor_name,
                    runtime_index,
                    chain_index,
                    'running',
                    json.dumps(input_data)
                ))

                conn.commit()
                self.logger.debug(f"Created operation: {operation_id}")
                return operation_id

        except Exception as e:
            self.logger.error(f"Failed to create operation: {str(e)}")
            raise

    def update_operation_status(
        self,
        operation_id: str,
        execution_status: str,
        output_data: Optional[Dict[str, Any]] = None
    ) -> None:
        """
        Update operation status and output data.

        Args:
            operation_id: Operation identifier
            execution_status: New execution status
            output_data: Optional output data
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                if output_data:
                    cursor.execute("""
                        UPDATE llm_worker_operation
                        SET execution_status = ?, output_data = ?, date_updated = CURRENT_TIMESTAMP
                        WHERE operation_id = ?
                    """, (execution_status, json.dumps(output_data), operation_id))
                else:
                    cursor.execute("""
                        UPDATE llm_worker_operation
                        SET execution_status = ?, date_updated = CURRENT_TIMESTAMP
                        WHERE operation_id = ?
                    """, (execution_status, operation_id))

                conn.commit()
                self.logger.debug(
                    f"Updated operation status: {operation_id} -> {execution_status}")

        except Exception as e:
            self.logger.error(f"Failed to update operation status: {str(e)}")
            raise

    def update_task_status(
        self,
        task_id: str,
        status: str,
        runtime_index: Optional[int] = None
    ) -> None:
        """
        Update task status and runtime index.

        Args:
            task_id: Task identifier
            status: New task status
            runtime_index: Optional runtime index
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()

                if runtime_index is not None:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, current_runtime_index = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, runtime_index, task_id))
                else:
                    cursor.execute("""
                        UPDATE llm_worker_task
                        SET status = ?, updated_at = CURRENT_TIMESTAMP
                        WHERE task_id = ?
                    """, (status, task_id))

                conn.commit()
                self.logger.debug(
                    f"Updated task status: {task_id} -> {status}")

        except Exception as e:
            self.logger.error(f"Failed to update task status: {str(e)}")
            raise
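
    # Note: create_task, create_operation, update_operation_status and
    # update_task_status are each defined twice in this class. Under Python's
    # class-body semantics the later definitions replace the earlier ones, so
    # the second set above (e.g. update_operation_status with an optional
    # output_data) is what callers actually invoke.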