fusesell-1.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fusesell might be problematic.

@@ -0,0 +1,4050 @@
+ """
+ Local Data Manager for FuseSell Local Implementation
+ Handles SQLite database operations and local file management
+ """
+
+ import sqlite3
+ import json
+ import os
+ import uuid
+ from typing import Dict, Any, List, Optional, Union
+ from datetime import datetime
+ import logging
+ from pathlib import Path
+
+
+ class LocalDataManager:
+     """
+     Manages local data storage using SQLite database and JSON files.
+     Provides interface for storing execution results, customer data, and configurations.
+     """
+
+     # Class-level tracking to prevent multiple initializations
+     _initialized_databases = set()
+     _initialization_lock = False
+
+     def __init__(self, data_dir: str = "./fusesell_data"):
+         """
+         Initialize data manager with specified data directory.
+
+         Args:
+             data_dir: Directory path for storing local data
+         """
+         self.data_dir = Path(data_dir)
+         self.db_path = self.data_dir / "fusesell.db"
+         self.config_dir = self.data_dir / "config"
+         self.drafts_dir = self.data_dir / "drafts"
+         self.logs_dir = self.data_dir / "logs"
+
+         self.logger = logging.getLogger("fusesell.data_manager")
+
+         # Create directories if they don't exist
+         self._create_directories()
+
+         # Initialize database with optimization check
+         self._init_database_optimized()
+
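+     # Illustrative usage sketch (not part of the package; the directory and IDs
+     # below are hypothetical):
+     #
+     #     dm = LocalDataManager(data_dir="./fusesell_data")
+     #     dm.save_execution("exec-001", "org-001", config={"mode": "demo"})
+     #     dm.update_execution_status("exec-001", "completed", results={"ok": True})
+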
+     def _create_directories(self) -> None:
+         """Create necessary directories for data storage."""
+         for directory in [self.data_dir, self.config_dir, self.drafts_dir, self.logs_dir]:
+             directory.mkdir(parents=True, exist_ok=True)
+
+     def _init_database_optimized(self) -> None:
+         """
+         Initialize database with optimization to avoid redundant initialization.
+         Only performs full initialization if database doesn't exist or is incomplete.
+         Uses a class-level lock to prevent concurrent initialization.
+         """
+         try:
+             db_path_str = str(self.db_path)
+
+             # Check if this database has already been initialized in this process
+             if db_path_str in LocalDataManager._initialized_databases:
+                 self.logger.debug("Database already initialized in this process, skipping initialization")
+                 return
+
+             # Use class-level lock to prevent concurrent initialization
+             if LocalDataManager._initialization_lock:
+                 self.logger.debug("Database initialization in progress by another instance, skipping")
+                 return
+
+             LocalDataManager._initialization_lock = True
+
+             try:
+                 # Double-check after acquiring lock
+                 if db_path_str in LocalDataManager._initialized_databases:
+                     self.logger.debug("Database already initialized by another instance, skipping initialization")
+                     return
+
+                 # Check if database exists and has basic tables
+                 if self.db_path.exists():
+                     with sqlite3.connect(self.db_path) as conn:
+                         cursor = conn.cursor()
+
+                         # Check if key tables exist (use tables that actually exist in our schema)
+                         cursor.execute("""
+                             SELECT name FROM sqlite_master
+                             WHERE type='table' AND name IN ('stage_results', 'customers', 'llm_worker_task')
+                         """)
+                         existing_tables = [row[0] for row in cursor.fetchall()]
+
+                         self.logger.debug(f"Database exists, found tables: {existing_tables}")
+
+                         if len(existing_tables) >= 3:
+                             self.logger.info("Database already initialized, skipping full initialization")
+                             LocalDataManager._initialized_databases.add(db_path_str)
+                             return
+
+                 # Perform full initialization
+                 self.logger.info("Performing database initialization")
+                 self._init_database()
+                 LocalDataManager._initialized_databases.add(db_path_str)
+
+             finally:
+                 LocalDataManager._initialization_lock = False
+
+         except Exception as e:
+             LocalDataManager._initialization_lock = False
+             self.logger.warning(f"Database optimization check failed, performing full initialization: {str(e)}")
+             self._init_database()
+             LocalDataManager._initialized_databases.add(db_path_str)
+
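+     # Illustrative note (assumption based on the guard above): a second manager
+     # pointing at the same file skips the schema work once the first registers it.
+     #
+     #     dm1 = LocalDataManager("./fusesell_data")  # performs full initialization
+     #     dm2 = LocalDataManager("./fusesell_data")  # debug-logs "already initialized" and returns
+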
+     def _init_database(self) -> None:
+         """Initialize SQLite database with required tables."""
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Create executions table
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS executions (
+                         execution_id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         org_name TEXT,
+                         customer_website TEXT,
+                         customer_name TEXT,
+                         status TEXT NOT NULL,
+                         started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         completed_at TIMESTAMP,
+                         config_json TEXT,
+                         results_json TEXT
+                     )
+                 """)
+
+                 # Create stage_results table
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS stage_results (
+                         id TEXT PRIMARY KEY,
+                         execution_id TEXT NOT NULL,
+                         stage_name TEXT NOT NULL,
+                         status TEXT NOT NULL,
+                         input_data TEXT,
+                         output_data TEXT,
+                         started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         completed_at TIMESTAMP,
+                         error_message TEXT,
+                         FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                     )
+                 """)
+
+                 # Create customers table
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS customers (
+                         customer_id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         company_name TEXT,
+                         website TEXT,
+                         industry TEXT,
+                         contact_name TEXT,
+                         contact_email TEXT,
+                         contact_phone TEXT,
+                         address TEXT,
+                         profile_data TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create lead_scores table
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS lead_scores (
+                         id TEXT PRIMARY KEY,
+                         execution_id TEXT NOT NULL,
+                         customer_id TEXT,
+                         product_id TEXT,
+                         score REAL,
+                         criteria_breakdown TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                     )
+                 """)
+
+                 # Create email_drafts table
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS email_drafts (
+                         draft_id TEXT PRIMARY KEY,
+                         execution_id TEXT NOT NULL,
+                         customer_id TEXT,
+                         subject TEXT,
+                         content TEXT,
+                         draft_type TEXT,
+                         version INTEGER DEFAULT 1,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (execution_id) REFERENCES executions(execution_id)
+                     )
+                 """)
+
+                 # Create llm_worker_task table (server-compatible)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS llm_worker_task (
+                         task_id TEXT PRIMARY KEY,
+                         plan_id TEXT NOT NULL,
+                         org_id TEXT NOT NULL,
+                         status TEXT NOT NULL DEFAULT 'running',
+                         current_runtime_index INTEGER DEFAULT 0,
+                         messages JSON,
+                         request_body JSON,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (plan_id) REFERENCES llm_worker_plan(id)
+                     )
+                 """)
+
+                 # Simply drop and recreate llm_worker_operation table to ensure correct schema
+                 cursor.execute("DROP TABLE IF EXISTS llm_worker_operation")
+                 self.logger.info("Creating llm_worker_operation table with server-compatible schema - FIXED VERSION")
+
+                 # Create llm_worker_operation table (server-compatible)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS llm_worker_operation (
+                         operation_id TEXT PRIMARY KEY,
+                         task_id TEXT NOT NULL,
+                         executor_name TEXT NOT NULL,
+                         runtime_index INTEGER NOT NULL DEFAULT 0,
+                         chain_index INTEGER NOT NULL DEFAULT 0,
+                         execution_status TEXT NOT NULL DEFAULT 'running',
+                         input_data JSON,
+                         output_data JSON,
+                         date_created TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         date_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (task_id) REFERENCES llm_worker_task(task_id)
+                     )
+                 """)
+
+                 # Create teams table (equivalent to llm_worker_plan_team)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS teams (
+                         team_id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         org_name TEXT,
+                         plan_id TEXT NOT NULL,
+                         plan_name TEXT,
+                         project_code TEXT,
+                         name TEXT NOT NULL,
+                         description TEXT,
+                         avatar TEXT,
+                         completed_settings INTEGER DEFAULT 0,
+                         total_settings INTEGER DEFAULT 0,
+                         completed_settings_list TEXT,
+                         missing_settings_list TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create team_settings table (equivalent to gs_team_settings)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS team_settings (
+                         id TEXT PRIMARY KEY,
+                         team_id TEXT NOT NULL,
+                         org_id TEXT NOT NULL,
+                         plan_id TEXT NOT NULL,
+                         plan_name TEXT,
+                         project_code TEXT,
+                         team_name TEXT,
+                         gs_team_organization TEXT,
+                         gs_team_rep TEXT,
+                         gs_team_product TEXT,
+                         gs_team_schedule_time TEXT,
+                         gs_team_initial_outreach TEXT,
+                         gs_team_follow_up TEXT,
+                         gs_team_auto_interaction TEXT,
+                         gs_team_followup_schedule_time TEXT,
+                         gs_team_birthday_email TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (team_id) REFERENCES teams(team_id)
+                     )
+                 """)
+
+                 # Check if we need to migrate old column names
+                 try:
+                     cursor.execute("PRAGMA table_info(team_settings)")
+                     columns = [row[1] for row in cursor.fetchall()]
+
+                     # Check if we have old column names and need to migrate
+                     old_columns = ['organization_settings', 'sales_rep_settings', 'product_settings']
+
+                     if any(col in columns for col in old_columns):
+                         self.logger.info("Migrating team_settings table to new column names")
+
+                         # Create new table with correct column names
+                         cursor.execute("""
+                             CREATE TABLE IF NOT EXISTS team_settings_new (
+                                 id TEXT PRIMARY KEY,
+                                 team_id TEXT NOT NULL,
+                                 org_id TEXT NOT NULL,
+                                 plan_id TEXT NOT NULL,
+                                 plan_name TEXT,
+                                 project_code TEXT,
+                                 team_name TEXT,
+                                 gs_team_organization TEXT,
+                                 gs_team_rep TEXT,
+                                 gs_team_product TEXT,
+                                 gs_team_schedule_time TEXT,
+                                 gs_team_initial_outreach TEXT,
+                                 gs_team_follow_up TEXT,
+                                 gs_team_auto_interaction TEXT,
+                                 gs_team_followup_schedule_time TEXT,
+                                 gs_team_birthday_email TEXT,
+                                 created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                                 updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                                 FOREIGN KEY (team_id) REFERENCES teams(team_id)
+                             )
+                         """)
+
+                         # Copy data from old table to new table
+                         cursor.execute("""
+                             INSERT OR IGNORE INTO team_settings_new
+                                 (id, team_id, org_id, plan_id, plan_name, project_code, team_name,
+                                  gs_team_organization, gs_team_rep, gs_team_product, gs_team_schedule_time,
+                                  gs_team_initial_outreach, gs_team_follow_up, gs_team_auto_interaction,
+                                  gs_team_followup_schedule_time, gs_team_birthday_email, created_at, updated_at)
+                             SELECT
+                                 id, team_id, org_id, plan_id, plan_name, project_code, team_name,
+                                 organization_settings, sales_rep_settings, product_settings, schedule_time_settings,
+                                 initial_outreach_settings, follow_up_settings, auto_interaction_settings,
+                                 followup_schedule_settings, birthday_email_settings, created_at, updated_at
+                             FROM team_settings
+                         """)
+
+                         # Drop old table and rename new one
+                         cursor.execute("DROP TABLE team_settings")
+                         cursor.execute("ALTER TABLE team_settings_new RENAME TO team_settings")
+
+                         self.logger.info("Team settings table migration completed")
+                 except Exception as e:
+                     self.logger.debug(f"Migration check/execution failed (may be normal): {str(e)}")
+
+                 # Create products table (equivalent to sell_products)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS products (
+                         product_id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         org_name TEXT,
+                         project_code TEXT,
+                         product_name TEXT NOT NULL,
+                         short_description TEXT,
+                         long_description TEXT,
+                         category TEXT,
+                         subcategory TEXT,
+                         target_users TEXT,
+                         key_features TEXT,
+                         unique_selling_points TEXT,
+                         pain_points_solved TEXT,
+                         competitive_advantages TEXT,
+                         pricing TEXT,
+                         pricing_rules TEXT,
+                         product_website TEXT,
+                         demo_available BOOLEAN DEFAULT FALSE,
+                         trial_available BOOLEAN DEFAULT FALSE,
+                         sales_contact_email TEXT,
+                         image_url TEXT,
+                         sales_metrics TEXT,
+                         customer_feedback TEXT,
+                         keywords TEXT,
+                         related_products TEXT,
+                         seasonal_demand TEXT,
+                         market_insights TEXT,
+                         case_studies TEXT,
+                         testimonials TEXT,
+                         success_metrics TEXT,
+                         product_variants TEXT,
+                         availability TEXT,
+                         technical_specifications TEXT,
+                         compatibility TEXT,
+                         support_info TEXT,
+                         regulatory_compliance TEXT,
+                         localization TEXT,
+                         installation_requirements TEXT,
+                         user_manual_url TEXT,
+                         return_policy TEXT,
+                         shipping_info TEXT,
+                         schema_version TEXT DEFAULT '1.3',
+                         status TEXT DEFAULT 'active',
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create gs_customer_llmtask table (server-compatible)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS gs_customer_llmtask (
+                         id TEXT PRIMARY KEY,
+                         task_id TEXT NOT NULL,
+                         customer_id TEXT NOT NULL,
+                         customer_name TEXT NOT NULL,
+                         customer_phone TEXT,
+                         customer_address TEXT,
+                         customer_email TEXT,
+                         customer_industry TEXT,
+                         customer_taxcode TEXT,
+                         customer_website TEXT,
+                         contact_name TEXT,
+                         org_id TEXT NOT NULL,
+                         org_name TEXT,
+                         project_code TEXT,
+                         crm_dob DATE,
+                         image_url TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         FOREIGN KEY (task_id) REFERENCES llm_worker_task(task_id),
+                         FOREIGN KEY (customer_id) REFERENCES customers(customer_id)
+                     )
+                 """)
+
+                 # Create prompts table (equivalent to gs_plan_team_prompt)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS prompts (
+                         id TEXT PRIMARY KEY,
+                         execution_id TEXT,
+                         org_id TEXT NOT NULL,
+                         plan_id TEXT,
+                         team_id TEXT,
+                         project_code TEXT,
+                         input_stage TEXT NOT NULL,
+                         prompt TEXT NOT NULL,
+                         fewshots BOOLEAN DEFAULT FALSE,
+                         instance_id TEXT,
+                         submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create scheduler_rules table (equivalent to gs_scheduler)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS scheduler_rules (
+                         id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         org_name TEXT,
+                         plan_id TEXT,
+                         plan_name TEXT,
+                         team_id TEXT,
+                         team_name TEXT,
+                         project_code TEXT,
+                         input_stage TEXT NOT NULL,
+                         input_stage_label TEXT,
+                         language TEXT,
+                         rule_config TEXT,
+                         is_autorun_time_rule BOOLEAN DEFAULT FALSE,
+                         status_code INTEGER,
+                         message TEXT,
+                         md_code TEXT,
+                         username TEXT,
+                         fullname TEXT,
+                         instance_id TEXT,
+                         submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create extracted_files table (equivalent to gs_plan_setting_extracted_file)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS extracted_files (
+                         id TEXT PRIMARY KEY,
+                         org_id TEXT NOT NULL,
+                         plan_id TEXT,
+                         team_id TEXT,
+                         project_code TEXT,
+                         import_uuid TEXT,
+                         file_url TEXT,
+                         project_url TEXT,
+                         extracted_data TEXT,
+                         username TEXT,
+                         fullname TEXT,
+                         instance_id TEXT,
+                         submission_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         retrieved_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                     )
+                 """)
+
+                 # Create llm_worker_plan table (server schema)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS llm_worker_plan (
+                         id TEXT PRIMARY KEY,
+                         name TEXT NOT NULL,
+                         description TEXT,
+                         org_id TEXT,
+                         status TEXT,
+                         executors TEXT,
+                         settings TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         date_created TIMESTAMP,
+                         date_updated TIMESTAMP,
+                         user_created TEXT,
+                         user_updated TEXT,
+                         sort INTEGER
+                     )
+                 """)
+
+                 # Create gs_company_criteria table (server schema)
+                 cursor.execute("""
+                     CREATE TABLE IF NOT EXISTS gs_company_criteria (
+                         id TEXT PRIMARY KEY,
+                         name TEXT NOT NULL,
+                         definition TEXT,
+                         weight REAL,
+                         guidelines TEXT,
+                         scoring_factors TEXT,
+                         org_id TEXT,
+                         status TEXT,
+                         created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                         date_created TIMESTAMP,
+                         date_updated TIMESTAMP,
+                         user_created TEXT,
+                         user_updated TEXT,
+                         sort INTEGER
+                     )
+                 """)
+
+                 # Create indexes for better performance
+                 # Check if executions is a table before creating index (it might be a view)
+                 cursor.execute("SELECT type FROM sqlite_master WHERE name='executions'")
+                 executions_type = cursor.fetchone()
+                 if executions_type and executions_type[0] == 'table':
+                     cursor.execute("CREATE INDEX IF NOT EXISTS idx_executions_org_id ON executions(org_id)")
+
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_stage_results_execution_id ON stage_results(execution_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_customers_org_id ON customers(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_lead_scores_execution_id ON lead_scores(execution_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_email_drafts_execution_id ON email_drafts(execution_id)")
+
+                 # Server-compatible indexes for performance
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_task_org_id ON llm_worker_task(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_task_plan_id ON llm_worker_task(plan_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_task_status ON llm_worker_task(status)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_task_id ON llm_worker_operation(task_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_task_runtime ON llm_worker_operation(task_id, runtime_index)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_executor_status ON llm_worker_operation(executor_name, execution_status)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_operation_created_date ON llm_worker_operation(date_created)")
+
+                 # Existing indexes
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_teams_org_id ON teams(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_team_settings_team_id ON team_settings(team_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_products_org_id ON products(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_gs_customer_llmtask_task_id ON gs_customer_llmtask(task_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_prompts_org_id ON prompts(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_scheduler_rules_org_id ON scheduler_rules(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_extracted_files_org_id ON extracted_files(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_llm_worker_plan_org_id ON llm_worker_plan(org_id)")
+                 cursor.execute("CREATE INDEX IF NOT EXISTS idx_gs_company_criteria_org_id ON gs_company_criteria(org_id)")
+
+                 # Create compatibility views for backward compatibility
+                 cursor.execute("""
+                     CREATE VIEW IF NOT EXISTS executions_view AS
+                     SELECT
+                         task_id as execution_id,
+                         org_id,
+                         '' as org_name,
+                         '' as customer_website,
+                         '' as customer_name,
+                         status,
+                         created_at as started_at,
+                         updated_at as completed_at,
+                         request_body as config_json,
+                         '{}' as results_json
+                     FROM llm_worker_task
+                 """)
+
+                 cursor.execute("""
+                     CREATE VIEW IF NOT EXISTS stage_results_view AS
+                     SELECT
+                         operation_id as id,
+                         task_id as execution_id,
+                         executor_name as stage_name,
+                         execution_status as status,
+                         input_data,
+                         output_data,
+                         date_created as started_at,
+                         date_updated as completed_at,
+                         CASE WHEN execution_status = 'failed'
+                              THEN json_extract(output_data, '$.error')
+                              ELSE NULL END as error_message
+                     FROM llm_worker_operation
+                 """)
+
+                 conn.commit()
+
+                 # Initialize default data for new tables
+                 self._initialize_default_data()
+
+                 self.logger.info("Database initialized successfully")
+
+         except Exception as e:
+             self.logger.error(f"Failed to initialize database: {str(e)}")
+             raise
+
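+     # Illustrative sanity check for the schema created above (table names match
+     # the DDL; the path is the constructor default):
+     #
+     #     with sqlite3.connect("./fusesell_data/fusesell.db") as conn:
+     #         names = {r[0] for r in conn.execute(
+     #             "SELECT name FROM sqlite_master WHERE type='table'")}
+     #         assert {"llm_worker_task", "products", "customers"} <= names
+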
+     def save_execution(
+         self,
+         execution_id: str,
+         org_id: str,
+         config: Dict[str, Any],
+         org_name: Optional[str] = None,
+         customer_website: Optional[str] = None,
+         customer_name: Optional[str] = None
+     ) -> None:
+         """
+         Save execution record to database.
+
+         Args:
+             execution_id: Unique execution identifier
+             org_id: Organization ID
+             config: Execution configuration
+             org_name: Organization name
+             customer_website: Customer website URL
+             customer_name: Customer company name
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT INTO executions
+                         (execution_id, org_id, org_name, customer_website, customer_name, status, config_json)
+                     VALUES (?, ?, ?, ?, ?, ?, ?)
+                 """, (
+                     execution_id, org_id, org_name, customer_website,
+                     customer_name, 'running', json.dumps(config)
+                 ))
+                 conn.commit()
+                 self.logger.debug(f"Saved execution record: {execution_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to save execution: {str(e)}")
+             raise
+
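+     # Illustrative sketch of how save_execution pairs with
+     # update_execution_status (defined next; IDs hypothetical):
+     #
+     #     dm.save_execution("exec-001", "org-001", config={"stages": ["scoring"]})
+     #     # ...run the pipeline stages...
+     #     dm.update_execution_status("exec-001", "completed", results={"leads": 3})
+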
+     def update_execution_status(
+         self,
+         execution_id: str,
+         status: str,
+         results: Optional[Dict[str, Any]] = None
+     ) -> None:
+         """
+         Update execution status and results.
+
+         Args:
+             execution_id: Execution identifier
+             status: New status (running, completed, failed)
+             results: Optional execution results
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 if results:
+                     cursor.execute("""
+                         UPDATE executions
+                         SET status = ?, completed_at = CURRENT_TIMESTAMP, results_json = ?
+                         WHERE execution_id = ?
+                     """, (status, json.dumps(results), execution_id))
+                 else:
+                     cursor.execute("""
+                         UPDATE executions
+                         SET status = ?, completed_at = CURRENT_TIMESTAMP
+                         WHERE execution_id = ?
+                     """, (status, execution_id))
+
+                 conn.commit()
+                 self.logger.debug(f"Updated execution status: {execution_id} -> {status}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to update execution status: {str(e)}")
+             raise
+
+     def save_stage_result(
+         self,
+         execution_id: str,
+         stage_name: str,
+         input_data: Dict[str, Any],
+         output_data: Dict[str, Any],
+         status: str,
+         error_message: Optional[str] = None
+     ) -> None:
+         """
+         Save stage execution result.
+
+         Args:
+             execution_id: Execution identifier
+             stage_name: Name of the stage
+             input_data: Stage input data
+             output_data: Stage output data
+             status: Stage execution status
+             error_message: Optional error message
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT INTO stage_results
+                         (id, execution_id, stage_name, status, input_data, output_data, completed_at, error_message)
+                     VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, ?)
+                 """, (
+                     f"uuid:{str(uuid.uuid4())}", execution_id, stage_name, status,
+                     json.dumps(input_data), json.dumps(output_data), error_message
+                 ))
+                 conn.commit()
+                 self.logger.debug(f"Saved stage result: {execution_id}/{stage_name}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to save stage result: {str(e)}")
+             raise
+
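+     # Illustrative sketch: recording a failed stage (values hypothetical):
+     #
+     #     dm.save_stage_result(
+     #         "exec-001", "lead_scoring",
+     #         input_data={"customer_id": "cust-1"},
+     #         output_data={"error": "timeout"},
+     #         status="failed", error_message="LLM call timed out")
+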
+     def save_customer(self, customer_data: Dict[str, Any]) -> str:
+         """
+         Save or update customer information.
+
+         Args:
+             customer_data: Customer information dictionary
+
+         Returns:
+             Customer ID
+         """
+         try:
+             customer_id = customer_data.get('customer_id') or self._generate_customer_id()
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Check if customer exists
+                 cursor.execute("SELECT customer_id FROM customers WHERE customer_id = ?", (customer_id,))
+                 exists = cursor.fetchone()
+
+                 if exists:
+                     # Update existing customer
+                     cursor.execute("""
+                         UPDATE customers
+                         SET company_name = ?, website = ?, industry = ?, contact_name = ?,
+                             contact_email = ?, contact_phone = ?, address = ?,
+                             profile_data = ?, updated_at = CURRENT_TIMESTAMP
+                         WHERE customer_id = ?
+                     """, (
+                         customer_data.get('company_name'),
+                         customer_data.get('website'),
+                         customer_data.get('industry'),
+                         customer_data.get('contact_name'),
+                         customer_data.get('contact_email'),
+                         customer_data.get('contact_phone'),
+                         customer_data.get('address'),
+                         json.dumps(customer_data),
+                         customer_id
+                     ))
+                 else:
+                     # Insert new customer
+                     cursor.execute("""
+                         INSERT INTO customers
+                             (customer_id, org_id, company_name, website, industry, contact_name,
+                              contact_email, contact_phone, address, profile_data)
+                         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """, (
+                         customer_id,
+                         customer_data.get('org_id'),
+                         customer_data.get('company_name'),
+                         customer_data.get('website'),
+                         customer_data.get('industry'),
+                         customer_data.get('contact_name'),
+                         customer_data.get('contact_email'),
+                         customer_data.get('contact_phone'),
+                         customer_data.get('address'),
+                         json.dumps(customer_data)
+                     ))
+
+                 conn.commit()
+                 self.logger.debug(f"Saved customer: {customer_id}")
+                 return customer_id
+
+         except Exception as e:
+             self.logger.error(f"Failed to save customer: {str(e)}")
+             raise
+
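+     # Illustrative sketch of the upsert semantics (values hypothetical): an
+     # existing customer_id updates the row; a missing one is generated with a
+     # "uuid:" prefix.
+     #
+     #     cid = dm.save_customer({"org_id": "org-001", "company_name": "Acme Co"})
+     #     dm.save_customer({"customer_id": cid, "org_id": "org-001",
+     #                       "company_name": "Acme Corporation"})  # updates in place
+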
+     def save_customer_task(self, customer_task_data: Dict[str, Any]) -> str:
+         """
+         Save customer task data to gs_customer_llmtask table (server-compatible).
+
+         Args:
+             customer_task_data: Customer task information dictionary
+
+         Returns:
+             Record ID
+         """
+         try:
+             record_id = f"{customer_task_data.get('task_id')}_{customer_task_data.get('customer_id')}"
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Insert or replace customer task data
+                 cursor.execute("""
+                     INSERT OR REPLACE INTO gs_customer_llmtask
+                         (id, task_id, customer_id, customer_name, customer_phone, customer_address,
+                          customer_email, customer_industry, customer_taxcode, customer_website,
+                          contact_name, org_id, org_name, project_code, crm_dob, image_url)
+                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                 """, (
+                     record_id,
+                     customer_task_data.get('task_id'),
+                     customer_task_data.get('customer_id'),
+                     customer_task_data.get('customer_name'),
+                     customer_task_data.get('customer_phone'),
+                     customer_task_data.get('customer_address'),
+                     customer_task_data.get('customer_email'),
+                     customer_task_data.get('customer_industry'),
+                     customer_task_data.get('customer_taxcode'),
+                     customer_task_data.get('customer_website'),
+                     customer_task_data.get('contact_name'),
+                     customer_task_data.get('org_id'),
+                     customer_task_data.get('org_name'),
+                     customer_task_data.get('project_code'),
+                     customer_task_data.get('crm_dob'),
+                     customer_task_data.get('image_url')
+                 ))
+
+                 conn.commit()
+                 self.logger.debug(f"Saved customer task: {record_id}")
+                 return record_id
+
+         except Exception as e:
+             self.logger.error(f"Failed to save customer task data: {str(e)}")
+             raise
+
+     def save_lead_score(
+         self,
+         execution_id: str,
+         customer_id: str,
+         product_id: str,
+         score: float,
+         criteria_breakdown: Dict[str, Any]
+     ) -> None:
+         """
+         Save lead scoring result.
+
+         Args:
+             execution_id: Execution identifier
+             customer_id: Customer identifier
+             product_id: Product identifier
+             score: Lead score (0-100)
+             criteria_breakdown: Detailed scoring breakdown
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT INTO lead_scores
+                         (id, execution_id, customer_id, product_id, score, criteria_breakdown)
+                     VALUES (?, ?, ?, ?, ?, ?)
+                 """, (
+                     f"uuid:{str(uuid.uuid4())}", execution_id, customer_id, product_id, score,
+                     json.dumps(criteria_breakdown)
+                 ))
+                 conn.commit()
+                 self.logger.debug(f"Saved lead score: {customer_id}/{product_id} = {score}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to save lead score: {str(e)}")
+             raise
+
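+     # Illustrative sketch (values hypothetical): the criteria breakdown is stored
+     # as JSON alongside the numeric score.
+     #
+     #     dm.save_lead_score("exec-001", "cust-1", "prod-1", 82.5,
+     #                        {"industry_fit": 30, "budget": 25, "timing": 27.5})
+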
+     def save_email_draft(
+         self,
+         draft_id: str,
+         execution_id: str,
+         customer_id: str,
+         subject: str,
+         content: str,
+         draft_type: str = "initial_outreach",
+         version: int = 1
+     ) -> None:
+         """
+         Save email draft.
+
+         Args:
+             draft_id: Draft identifier
+             execution_id: Execution identifier
+             customer_id: Customer identifier
+             subject: Email subject
+             content: Email content
+             draft_type: Type of draft (initial_outreach, follow_up)
+             version: Draft version number
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT INTO email_drafts
+                         (draft_id, execution_id, customer_id, subject, content, draft_type, version)
+                     VALUES (?, ?, ?, ?, ?, ?, ?)
+                 """, (draft_id, execution_id, customer_id, subject, content, draft_type, version))
+                 conn.commit()
+                 self.logger.debug(f"Saved email draft: {draft_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to save email draft: {str(e)}")
+             raise
+
+     def get_execution(self, execution_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get execution record by ID.
+
+         Args:
+             execution_id: Execution identifier
+
+         Returns:
+             Execution record dictionary or None if not found
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+                 cursor.execute("SELECT * FROM executions WHERE execution_id = ?", (execution_id,))
+                 row = cursor.fetchone()
+
+                 if row:
+                     result = dict(row)
+                     if result['config_json']:
+                         result['config'] = json.loads(result['config_json'])
+                     if result['results_json']:
+                         result['results'] = json.loads(result['results_json'])
+                     return result
+                 return None
+
+         except Exception as e:
+             self.logger.error(f"Failed to get execution: {str(e)}")
+             raise
+
+     def get_stage_results(self, execution_id: str) -> List[Dict[str, Any]]:
+         """
+         Get all stage results for an execution.
+
+         Args:
+             execution_id: Execution identifier
+
+         Returns:
+             List of stage result dictionaries
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     SELECT * FROM stage_results
+                     WHERE execution_id = ?
+                     ORDER BY started_at
+                 """, (execution_id,))
+
+                 results = []
+                 for row in cursor.fetchall():
+                     result = dict(row)
+                     if result['input_data']:
+                         result['input_data'] = json.loads(result['input_data'])
+                     if result['output_data']:
+                         result['output_data'] = json.loads(result['output_data'])
+                     results.append(result)
+
+                 return results
+
+         except Exception as e:
+             self.logger.error(f"Failed to get stage results: {str(e)}")
+             raise
+
+     def load_prompts(self) -> Dict[str, Any]:
+         """
+         Load prompt templates from configuration.
+
+         Returns:
+             Dictionary of prompt templates
+         """
+         try:
+             prompts_file = self.config_dir / "prompts.json"
+             if prompts_file.exists():
+                 with open(prompts_file, 'r', encoding='utf-8') as f:
+                     return json.load(f)
+             return {}
+         except Exception as e:
+             self.logger.error(f"Failed to load prompts: {str(e)}")
+             return {}
+
+     def load_scoring_criteria(self) -> Dict[str, Any]:
+         """
+         Load scoring criteria configuration.
+
+         Returns:
+             Dictionary of scoring criteria
+         """
+         try:
+             criteria_file = self.config_dir / "scoring_criteria.json"
+             if criteria_file.exists():
+                 with open(criteria_file, 'r', encoding='utf-8') as f:
+                     return json.load(f)
+             return {}
+         except Exception as e:
+             self.logger.error(f"Failed to load scoring criteria: {str(e)}")
+             return {}
+
+     def load_email_templates(self) -> Dict[str, Any]:
+         """
+         Load email templates configuration.
+
+         Returns:
+             Dictionary of email templates
+         """
+         try:
+             templates_file = self.config_dir / "email_templates.json"
+             if templates_file.exists():
+                 with open(templates_file, 'r', encoding='utf-8') as f:
+                     return json.load(f)
+             return {}
+         except Exception as e:
+             self.logger.error(f"Failed to load email templates: {str(e)}")
+             return {}
+
+     def _generate_customer_id(self) -> str:
+         """Generate unique customer ID."""
+         # uuid is already imported at module level; no local import needed
+         return f"uuid:{str(uuid.uuid4())}"
+
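+     # Illustrative note: generated IDs carry a literal "uuid:" prefix, e.g.
+     #
+     #     dm._generate_customer_id()  # -> "uuid:0f8f2c1a-..." (hypothetical value)
+     #
+     # so lookups must match the full string, prefix included.
+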
+     # ===== TEAM MANAGEMENT METHODS =====
+
+     def save_team(
+         self,
+         team_id: str,
+         org_id: str,
+         org_name: str,
+         plan_id: str,
+         name: str,
+         description: Optional[str] = None,
+         plan_name: Optional[str] = None,
+         project_code: Optional[str] = None,
+         avatar: Optional[str] = None
+     ) -> str:
+         """
+         Save or update team information.
+
+         Args:
+             team_id: Team identifier
+             org_id: Organization identifier
+             org_name: Organization name
+             plan_id: Plan identifier
+             name: Team name
+             description: Team description
+             plan_name: Plan name
+             project_code: Project code
+             avatar: Avatar URL
+
+         Returns:
+             Team ID
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Check if team exists
+                 cursor.execute("SELECT team_id FROM teams WHERE team_id = ?", (team_id,))
+                 exists = cursor.fetchone()
+
+                 if exists:
+                     # Update existing team
+                     cursor.execute("""
+                         UPDATE teams SET
+                             org_name = ?, plan_name = ?, project_code = ?, name = ?, description = ?,
+                             avatar = ?, updated_at = CURRENT_TIMESTAMP
+                         WHERE team_id = ?
+                     """, (org_name, plan_name, project_code, name, description, avatar, team_id))
+                 else:
+                     # Insert new team
+                     cursor.execute("""
+                         INSERT INTO teams
+                             (team_id, org_id, org_name, plan_id, plan_name, project_code, name, description, avatar)
+                         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """, (team_id, org_id, org_name, plan_id, plan_name, project_code, name, description, avatar))
+
+                 conn.commit()
+                 self.logger.debug(f"Saved team: {team_id}")
+                 return team_id
+
+         except Exception as e:
+             self.logger.error(f"Error saving team {team_id}: {str(e)}")
+             raise
+
+     def get_team(self, team_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get team by ID.
+
+         Args:
+             team_id: Team identifier
+
+         Returns:
+             Team data or None if not found
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("SELECT * FROM teams WHERE team_id = ?", (team_id,))
+                 row = cursor.fetchone()
+
+                 if row:
+                     columns = [description[0] for description in cursor.description]
+                     return dict(zip(columns, row))
+                 return None
+
+         except Exception as e:
+             self.logger.error(f"Error getting team {team_id}: {str(e)}")
+             raise
+
+     def list_teams(self, org_id: str) -> List[Dict[str, Any]]:
+         """
+         List all teams for an organization.
+
+         Args:
+             org_id: Organization identifier
+
+         Returns:
+             List of team data
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("SELECT * FROM teams WHERE org_id = ? ORDER BY created_at DESC", (org_id,))
+                 rows = cursor.fetchall()
+
+                 columns = [description[0] for description in cursor.description]
+                 return [dict(zip(columns, row)) for row in rows]
+
+         except Exception as e:
+             self.logger.error(f"Error listing teams for org {org_id}: {str(e)}")
+             raise
+
+     def update_team(
+         self,
+         team_id: str,
+         name: Optional[str] = None,
+         description: Optional[str] = None,
+         plan_name: Optional[str] = None,
+         project_code: Optional[str] = None,
+         avatar: Optional[str] = None
+     ) -> bool:
+         """
+         Update team information.
+
+         Args:
+             team_id: Team identifier
+             name: New team name
+             description: New team description
+             plan_name: New plan name
+             project_code: New project code
+             avatar: New avatar URL
+
+         Returns:
+             True if updated successfully
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Build update query dynamically
+                 updates = []
+                 params = []
+
+                 if name is not None:
+                     updates.append("name = ?")
+                     params.append(name)
+                 if description is not None:
+                     updates.append("description = ?")
+                     params.append(description)
+                 if plan_name is not None:
+                     updates.append("plan_name = ?")
+                     params.append(plan_name)
+                 if project_code is not None:
+                     updates.append("project_code = ?")
+                     params.append(project_code)
+                 if avatar is not None:
+                     updates.append("avatar = ?")
+                     params.append(avatar)
+
+                 if not updates:
+                     return True  # Nothing to update
+
+                 updates.append("updated_at = CURRENT_TIMESTAMP")
+                 params.append(team_id)
+
+                 query = f"UPDATE teams SET {', '.join(updates)} WHERE team_id = ?"
+                 cursor.execute(query, params)
+
+                 conn.commit()
+                 self.logger.debug(f"Updated team: {team_id}")
+                 return cursor.rowcount > 0
+
+         except Exception as e:
+             self.logger.error(f"Error updating team {team_id}: {str(e)}")
+             raise
+
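+     # Illustrative sketch (IDs hypothetical): only the supplied fields enter the
+     # SET clause, so the call below issues
+     # "UPDATE teams SET name = ?, updated_at = CURRENT_TIMESTAMP WHERE team_id = ?".
+     #
+     #     dm.update_team("team-1", name="Outbound EU")
+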
+     # ===== TEAM SETTINGS MANAGEMENT METHODS =====
+
+     def save_team_settings(
+         self,
+         team_id: str,
+         org_id: str,
+         plan_id: str,
+         team_name: str,
+         gs_team_organization: Optional[Dict[str, Any]] = None,
+         gs_team_rep: Optional[List[Dict[str, Any]]] = None,
+         gs_team_product: Optional[List[Dict[str, Any]]] = None,
+         gs_team_schedule_time: Optional[Dict[str, Any]] = None,
+         gs_team_initial_outreach: Optional[Dict[str, Any]] = None,
+         gs_team_follow_up: Optional[Dict[str, Any]] = None,
+         gs_team_auto_interaction: Optional[Dict[str, Any]] = None,
+         gs_team_followup_schedule_time: Optional[Dict[str, Any]] = None,
+         gs_team_birthday_email: Optional[Dict[str, Any]] = None
+     ) -> None:
+         """
+         Save or update team settings.
+
+         Args:
+             team_id: Team identifier
+             org_id: Organization identifier
+             plan_id: Plan identifier
+             team_name: Team name
+             gs_team_organization: Organization configuration
+             gs_team_rep: Sales representative settings
+             gs_team_product: Product configuration
+             gs_team_schedule_time: Scheduling configuration
+             gs_team_initial_outreach: Initial outreach configuration
+             gs_team_follow_up: Follow-up configuration
+             gs_team_auto_interaction: Auto interaction rules
+             gs_team_followup_schedule_time: Follow-up scheduling rules
+             gs_team_birthday_email: Birthday email configuration
+         """
+         try:
+             settings_id = f"{team_id}_{org_id}"
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Check if settings exist
+                 cursor.execute("SELECT id FROM team_settings WHERE team_id = ?", (team_id,))
+                 exists = cursor.fetchone()
+
+                 if exists:
+                     # Update existing settings
+                     cursor.execute("""
+                         UPDATE team_settings
+                         SET org_id = ?, plan_id = ?, team_name = ?,
+                             gs_team_organization = ?, gs_team_rep = ?, gs_team_product = ?,
+                             gs_team_schedule_time = ?, gs_team_initial_outreach = ?, gs_team_follow_up = ?,
+                             gs_team_auto_interaction = ?, gs_team_followup_schedule_time = ?, gs_team_birthday_email = ?,
+                             updated_at = CURRENT_TIMESTAMP
+                         WHERE team_id = ?
+                     """, (
+                         org_id, plan_id, team_name,
+                         json.dumps(gs_team_organization) if gs_team_organization else None,
+                         json.dumps(gs_team_rep) if gs_team_rep else None,
+                         json.dumps(gs_team_product) if gs_team_product else None,
+                         json.dumps(gs_team_schedule_time) if gs_team_schedule_time else None,
+                         json.dumps(gs_team_initial_outreach) if gs_team_initial_outreach else None,
+                         json.dumps(gs_team_follow_up) if gs_team_follow_up else None,
+                         json.dumps(gs_team_auto_interaction) if gs_team_auto_interaction else None,
+                         json.dumps(gs_team_followup_schedule_time) if gs_team_followup_schedule_time else None,
+                         json.dumps(gs_team_birthday_email) if gs_team_birthday_email else None,
+                         team_id
+                     ))
+                 else:
+                     # Insert new settings
+                     cursor.execute("""
+                         INSERT INTO team_settings
+                             (id, team_id, org_id, plan_id, team_name, gs_team_organization, gs_team_rep,
+                              gs_team_product, gs_team_schedule_time, gs_team_initial_outreach, gs_team_follow_up,
+                              gs_team_auto_interaction, gs_team_followup_schedule_time, gs_team_birthday_email)
+                         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """, (
+                         settings_id, team_id, org_id, plan_id, team_name,
+                         json.dumps(gs_team_organization) if gs_team_organization else None,
+                         json.dumps(gs_team_rep) if gs_team_rep else None,
+                         json.dumps(gs_team_product) if gs_team_product else None,
+                         json.dumps(gs_team_schedule_time) if gs_team_schedule_time else None,
+                         json.dumps(gs_team_initial_outreach) if gs_team_initial_outreach else None,
+                         json.dumps(gs_team_follow_up) if gs_team_follow_up else None,
+                         json.dumps(gs_team_auto_interaction) if gs_team_auto_interaction else None,
+                         json.dumps(gs_team_followup_schedule_time) if gs_team_followup_schedule_time else None,
+                         json.dumps(gs_team_birthday_email) if gs_team_birthday_email else None
+                     ))
+
+                 conn.commit()
+                 self.logger.debug(f"Saved team settings: {team_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to save team settings: {str(e)}")
+             raise
+
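+     # Illustrative round trip (values hypothetical): dict/list arguments are
+     # serialized to JSON on save and parsed back by get_team_settings (defined next).
+     #
+     #     dm.save_team_settings("team-1", "org-001", "plan-1", "Outbound EU",
+     #                           gs_team_rep=[{"name": "A. Seller", "email": "a@example.com"}])
+     #     settings = dm.get_team_settings("team-1")
+     #     settings["gs_team_rep"][0]["name"]  # -> "A. Seller"
+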
+     def get_team_settings(self, team_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get team settings by team ID.
+
+         Args:
+             team_id: Team identifier
+
+         Returns:
+             Team settings dictionary or None if not found
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+                 cursor.execute("SELECT * FROM team_settings WHERE team_id = ?", (team_id,))
+                 row = cursor.fetchone()
+
+                 if row:
+                     result = dict(row)
+                     # Parse JSON fields
+                     json_fields = [
+                         'gs_team_organization', 'gs_team_rep', 'gs_team_product',
+                         'gs_team_schedule_time', 'gs_team_initial_outreach', 'gs_team_follow_up',
+                         'gs_team_auto_interaction', 'gs_team_followup_schedule_time', 'gs_team_birthday_email'
+                     ]
+
+                     for field in json_fields:
+                         if result[field]:
+                             try:
+                                 result[field] = json.loads(result[field])
+                             except json.JSONDecodeError:
+                                 result[field] = None
+
+                     return result
+                 return None
+
+         except Exception as e:
+             self.logger.error(f"Failed to get team settings: {str(e)}")
+             raise
+
+     def save_product(self, product_data: Dict[str, Any]) -> str:
+         """
+         Save or update product information.
+
+         Args:
+             product_data: Product information dictionary
+
+         Returns:
+             Product ID
+         """
+         try:
+             product_id = product_data.get('product_id') or product_data.get('id') or self._generate_product_id()
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 # Check if product exists
+                 cursor.execute("SELECT product_id FROM products WHERE product_id = ?", (product_id,))
+                 exists = cursor.fetchone()
+
+                 if exists:
+                     # Update existing product
+                     cursor.execute("""
+                         UPDATE products
+                         SET org_id = ?, org_name = ?, project_code = ?, product_name = ?,
+                             short_description = ?, long_description = ?, category = ?, subcategory = ?,
+                             target_users = ?, key_features = ?, unique_selling_points = ?, pain_points_solved = ?,
+                             competitive_advantages = ?, pricing = ?, pricing_rules = ?, product_website = ?,
+                             demo_available = ?, trial_available = ?, sales_contact_email = ?, image_url = ?,
+                             sales_metrics = ?, customer_feedback = ?, keywords = ?, related_products = ?,
+                             seasonal_demand = ?, market_insights = ?, case_studies = ?, testimonials = ?,
+                             success_metrics = ?, product_variants = ?, availability = ?, technical_specifications = ?,
+                             compatibility = ?, support_info = ?, regulatory_compliance = ?, localization = ?,
+                             installation_requirements = ?, user_manual_url = ?, return_policy = ?, shipping_info = ?,
+                             updated_at = CURRENT_TIMESTAMP
+                         WHERE product_id = ?
+                     """, (
+                         product_data.get('org_id'), product_data.get('org_name'), product_data.get('project_code'),
+                         product_data.get('productName'), product_data.get('shortDescription'), product_data.get('longDescription'),
+                         product_data.get('category'), product_data.get('subcategory'),
+                         json.dumps(product_data.get('targetUsers')) if product_data.get('targetUsers') else None,
+                         json.dumps(product_data.get('keyFeatures')) if product_data.get('keyFeatures') else None,
+                         json.dumps(product_data.get('uniqueSellingPoints')) if product_data.get('uniqueSellingPoints') else None,
+                         json.dumps(product_data.get('painPointsSolved')) if product_data.get('painPointsSolved') else None,
+                         json.dumps(product_data.get('competitiveAdvantages')) if product_data.get('competitiveAdvantages') else None,
+                         json.dumps(product_data.get('pricing')) if product_data.get('pricing') else None,
+                         json.dumps(product_data.get('pricingRules')) if product_data.get('pricingRules') else None,
+                         product_data.get('productWebsite'), product_data.get('demoAvailable', False),
+                         product_data.get('trialAvailable', False), product_data.get('salesContactEmail'),
+                         product_data.get('imageUrl'),
+                         json.dumps(product_data.get('salesMetrics')) if product_data.get('salesMetrics') else None,
+                         json.dumps(product_data.get('customerFeedback')) if product_data.get('customerFeedback') else None,
+                         json.dumps(product_data.get('keywords')) if product_data.get('keywords') else None,
+                         json.dumps(product_data.get('relatedProducts')) if product_data.get('relatedProducts') else None,
+                         json.dumps(product_data.get('seasonalDemand')) if product_data.get('seasonalDemand') else None,
+                         json.dumps(product_data.get('marketInsights')) if product_data.get('marketInsights') else None,
+                         json.dumps(product_data.get('caseStudies')) if product_data.get('caseStudies') else None,
+                         json.dumps(product_data.get('testimonials')) if product_data.get('testimonials') else None,
+                         json.dumps(product_data.get('successMetrics')) if product_data.get('successMetrics') else None,
+                         json.dumps(product_data.get('productVariants')) if product_data.get('productVariants') else None,
+                         product_data.get('availability'),
+                         json.dumps(product_data.get('technicalSpecifications')) if product_data.get('technicalSpecifications') else None,
+                         json.dumps(product_data.get('compatibility')) if product_data.get('compatibility') else None,
+                         json.dumps(product_data.get('supportInfo')) if product_data.get('supportInfo') else None,
+                         json.dumps(product_data.get('regulatoryCompliance')) if product_data.get('regulatoryCompliance') else None,
+                         json.dumps(product_data.get('localization')) if product_data.get('localization') else None,
+                         product_data.get('installationRequirements'), product_data.get('userManualUrl'),
+                         product_data.get('returnPolicy'),
+                         json.dumps(product_data.get('shippingInfo')) if product_data.get('shippingInfo') else None,
+                         product_id
+                     ))
+                 else:
+                     # Insert new product
+                     cursor.execute("""
+                         INSERT INTO products
+                             (product_id, org_id, org_name, project_code, product_name, short_description, long_description,
+                              category, subcategory, target_users, key_features, unique_selling_points, pain_points_solved,
+                              competitive_advantages, pricing, pricing_rules, product_website, demo_available, trial_available,
+                              sales_contact_email, image_url, sales_metrics, customer_feedback, keywords, related_products,
+                              seasonal_demand, market_insights, case_studies, testimonials, success_metrics, product_variants,
+                              availability, technical_specifications, compatibility, support_info, regulatory_compliance,
+                              localization, installation_requirements, user_manual_url, return_policy, shipping_info)
+                         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                     """, (
+                         product_id, product_data.get('org_id'), product_data.get('org_name'), product_data.get('project_code'),
+                         product_data.get('productName'), product_data.get('shortDescription'), product_data.get('longDescription'),
+                         product_data.get('category'), product_data.get('subcategory'),
+                         json.dumps(product_data.get('targetUsers')) if product_data.get('targetUsers') else None,
+                         json.dumps(product_data.get('keyFeatures')) if product_data.get('keyFeatures') else None,
+                         json.dumps(product_data.get('uniqueSellingPoints')) if product_data.get('uniqueSellingPoints') else None,
+                         json.dumps(product_data.get('painPointsSolved')) if product_data.get('painPointsSolved') else None,
+                         json.dumps(product_data.get('competitiveAdvantages')) if product_data.get('competitiveAdvantages') else None,
+                         json.dumps(product_data.get('pricing')) if product_data.get('pricing') else None,
+                         json.dumps(product_data.get('pricingRules')) if product_data.get('pricingRules') else None,
+                         product_data.get('productWebsite'), product_data.get('demoAvailable', False),
+                         product_data.get('trialAvailable', False), product_data.get('salesContactEmail'),
+                         product_data.get('imageUrl'),
+                         json.dumps(product_data.get('salesMetrics')) if product_data.get('salesMetrics') else None,
+                         json.dumps(product_data.get('customerFeedback')) if product_data.get('customerFeedback') else None,
+                         json.dumps(product_data.get('keywords')) if product_data.get('keywords') else None,
+                         json.dumps(product_data.get('relatedProducts')) if product_data.get('relatedProducts') else None,
+                         json.dumps(product_data.get('seasonalDemand')) if product_data.get('seasonalDemand') else None,
+                         json.dumps(product_data.get('marketInsights')) if product_data.get('marketInsights') else None,
+                         json.dumps(product_data.get('caseStudies')) if product_data.get('caseStudies') else None,
+                         json.dumps(product_data.get('testimonials')) if product_data.get('testimonials') else None,
+                         json.dumps(product_data.get('successMetrics')) if product_data.get('successMetrics') else None,
+                         json.dumps(product_data.get('productVariants')) if product_data.get('productVariants') else None,
+                         product_data.get('availability'),
+                         json.dumps(product_data.get('technicalSpecifications')) if product_data.get('technicalSpecifications') else None,
+                         json.dumps(product_data.get('compatibility')) if product_data.get('compatibility') else None,
+                         json.dumps(product_data.get('supportInfo')) if product_data.get('supportInfo') else None,
+                         json.dumps(product_data.get('regulatoryCompliance')) if product_data.get('regulatoryCompliance') else None,
+                         json.dumps(product_data.get('localization')) if product_data.get('localization') else None,
+                         product_data.get('installationRequirements'), product_data.get('userManualUrl'),
+                         product_data.get('returnPolicy'),
+                         json.dumps(product_data.get('shippingInfo')) if product_data.get('shippingInfo') else None
+                     ))
+
+                 conn.commit()
+                 self.logger.debug(f"Saved product: {product_id}")
+                 return product_id
+
+         except Exception as e:
+             self.logger.error(f"Failed to save product: {str(e)}")
+             raise
+
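+     # Illustrative sketch (values hypothetical): save_product reads camelCase keys
+     # ("productName", "keyFeatures", ...) but stores snake_case columns, JSON-encoding
+     # list and dict values.
+     #
+     #     pid = dm.save_product({"org_id": "org-001", "productName": "FuseSell Pro",
+     #                            "keyFeatures": ["lead scoring", "email drafts"]})
+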
+     def get_products_by_org(self, org_id: str) -> List[Dict[str, Any]]:
+         """
+         Get all products for an organization.
+
+         Args:
+             org_id: Organization identifier
+
+         Returns:
+             List of product dictionaries
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+                 cursor.execute("SELECT * FROM products WHERE org_id = ? AND status = 'active'", (org_id,))
+
+                 products = []
+                 for row in cursor.fetchall():
+                     product = dict(row)
+                     # Parse JSON fields
+                     json_fields = [
+                         'target_users', 'key_features', 'unique_selling_points', 'pain_points_solved',
+                         'competitive_advantages', 'pricing', 'pricing_rules', 'sales_metrics',
+                         'customer_feedback', 'keywords', 'related_products', 'seasonal_demand',
+                         'market_insights', 'case_studies', 'testimonials', 'success_metrics',
+                         'product_variants', 'technical_specifications', 'compatibility', 'support_info',
+                         'regulatory_compliance', 'localization', 'shipping_info'
+                     ]
+
+                     for field in json_fields:
+                         if product[field]:
+                             try:
+                                 product[field] = json.loads(product[field])
+                             except json.JSONDecodeError:
+                                 product[field] = None
+
+                     products.append(product)
+
+                 return products
+
+         except Exception as e:
+             self.logger.error(f"Failed to get products: {str(e)}")
+             raise
+
1617
+ def get_products_by_team(self, team_id: str) -> List[Dict[str, Any]]:
1618
+ """
1619
+ Get products configured for a specific team.
1620
+
1621
+ Args:
1622
+ team_id: Team identifier
1623
+
1624
+ Returns:
1625
+ List of product dictionaries
1626
+ """
1627
+ try:
1628
+ # Get team settings first
1629
+ team_settings = self.get_team_settings(team_id)
1630
+ if not team_settings or not team_settings.get('gs_team_product'):
1631
+ return []
1632
+
1633
+ # Extract product IDs from team settings
1634
+ product_settings = team_settings['gs_team_product']
1635
+ if not isinstance(product_settings, list):
1636
+ return []
1637
+
1638
+ product_ids = [p.get('product_id')
1639
+ for p in product_settings if p.get('product_id')]
1640
+ if not product_ids:
1641
+ return []
1642
+
1643
+ # Get products by IDs
1644
+ with sqlite3.connect(self.db_path) as conn:
1645
+ conn.row_factory = sqlite3.Row
1646
+ cursor = conn.cursor()
1647
+
1648
+ placeholders = ','.join(['?' for _ in product_ids])
1649
+ cursor.execute(
1650
+ f"SELECT * FROM products WHERE product_id IN ({placeholders}) AND status = 'active'", product_ids)
1651
+
1652
+ products = []
1653
+ for row in cursor.fetchall():
1654
+ product = dict(row)
1655
+ # Parse JSON fields (same as get_products_by_org)
1656
+ json_fields = [
1657
+ 'target_users', 'key_features', 'unique_selling_points', 'pain_points_solved',
1658
+ 'competitive_advantages', 'pricing', 'pricing_rules', 'sales_metrics',
1659
+ 'customer_feedback', 'keywords', 'related_products', 'seasonal_demand',
1660
+ 'market_insights', 'case_studies', 'testimonials', 'success_metrics',
1661
+ 'product_variants', 'technical_specifications', 'compatibility', 'support_info',
1662
+ 'regulatory_compliance', 'localization', 'shipping_info'
1663
+ ]
1664
+
1665
+ for field in json_fields:
1666
+ if product[field]:
1667
+ try:
1668
+ product[field] = json.loads(product[field])
1669
+ except json.JSONDecodeError:
1670
+ product[field] = None
1671
+
1672
+ products.append(product)
1673
+
1674
+ return products
1675
+
1676
+ except Exception as e:
1677
+ self.logger.error(f"Failed to get products by team: {str(e)}")
1678
+ raise
1679
+
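+ # Illustrative usage sketch (editorial addition, not part of the package):
+ # team-scoped lookup resolves product IDs from the team's product settings,
+ # then filters to active products. The team id matches the seeded default.
+ #
+ #     dm = LocalDataManager("./fusesell_data")
+ #     for product in dm.get_products_by_team("team_rta_default"):
+ #         print(product["product_name"], product.get("key_features"))
+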
1680
+ def get_product(self, product_id: str) -> Optional[Dict[str, Any]]:
1681
+ """
1682
+ Get product by ID.
1683
+
1684
+ Args:
1685
+ product_id: Product identifier
1686
+
1687
+ Returns:
1688
+ Product data or None if not found
1689
+ """
1690
+ try:
1691
+ with sqlite3.connect(self.db_path) as conn:
1692
+ conn.row_factory = sqlite3.Row
1693
+ cursor = conn.cursor()
1694
+ cursor.execute("SELECT * FROM products WHERE product_id = ?", (product_id,))
1695
+ row = cursor.fetchone()
1696
+
1697
+ if row:
1698
+ product = dict(row)
1699
+ # Parse JSON fields
1700
+ json_fields = [
1701
+ 'target_users', 'key_features', 'unique_selling_points', 'pain_points_solved',
1702
+ 'competitive_advantages', 'pricing', 'pricing_rules', 'sales_metrics',
1703
+ 'customer_feedback', 'keywords', 'related_products', 'seasonal_demand',
1704
+ 'market_insights', 'case_studies', 'testimonials', 'success_metrics',
1705
+ 'product_variants', 'technical_specifications', 'compatibility', 'support_info',
1706
+ 'regulatory_compliance', 'localization', 'shipping_info'
1707
+ ]
1708
+
1709
+ for field in json_fields:
1710
+ if product[field]:
1711
+ try:
1712
+ product[field] = json.loads(product[field])
1713
+ except json.JSONDecodeError:
1714
+ product[field] = None
1715
+
1716
+ return product
1717
+ return None
1718
+
1719
+ except Exception as e:
1720
+ self.logger.error(f"Error getting product {product_id}: {str(e)}")
1721
+ raise
1722
+
1723
+ def update_product(self, product_id: str, product_data: Dict[str, Any]) -> bool:
1724
+ """
1725
+ Update product information.
1726
+
1727
+ Args:
1728
+ product_id: Product identifier
1729
+ product_data: Updated product data
1730
+
1731
+ Returns:
1732
+ True if updated successfully
1733
+ """
1734
+ try:
1735
+ # Get existing product data first
1736
+ existing_product = self.get_product(product_id)
1737
+ if not existing_product:
1738
+ self.logger.error(f"Product not found: {product_id}")
1739
+ return False
1740
+
1741
+ # Convert existing data to save_product format (snake_case to camelCase)
1742
+ converted_existing = {
1743
+ 'product_id': existing_product.get('product_id'),
1744
+ 'org_id': existing_product.get('org_id'),
1745
+ 'org_name': existing_product.get('org_name'),
1746
+ 'project_code': existing_product.get('project_code'),
1747
+ 'productName': existing_product.get('product_name'),
1748
+ 'shortDescription': existing_product.get('short_description'),
1749
+ 'longDescription': existing_product.get('long_description'),
1750
+ 'category': existing_product.get('category'),
1751
+ 'subcategory': existing_product.get('subcategory'),
1752
+ 'targetUsers': existing_product.get('target_users'),
1753
+ 'keyFeatures': existing_product.get('key_features'),
1754
+ 'uniqueSellingPoints': existing_product.get('unique_selling_points'),
1755
+ 'painPointsSolved': existing_product.get('pain_points_solved'),
1756
+ 'competitiveAdvantages': existing_product.get('competitive_advantages'),
1757
+ 'pricing': existing_product.get('pricing'),
1758
+ 'pricingRules': existing_product.get('pricing_rules'),
1759
+ 'productWebsite': existing_product.get('product_website'),
1760
+ 'demoAvailable': existing_product.get('demo_available'),
1761
+ 'trialAvailable': existing_product.get('trial_available'),
1762
+ 'salesContactEmail': existing_product.get('sales_contact_email'),
1763
+ 'imageUrl': existing_product.get('image_url'),
1764
+ 'salesMetrics': existing_product.get('sales_metrics'),
1765
+ 'customerFeedback': existing_product.get('customer_feedback'),
1766
+ 'keywords': existing_product.get('keywords'),
1767
+ 'relatedProducts': existing_product.get('related_products'),
1768
+ 'seasonalDemand': existing_product.get('seasonal_demand'),
1769
+ 'marketInsights': existing_product.get('market_insights'),
1770
+ 'caseStudies': existing_product.get('case_studies'),
1771
+ 'testimonials': existing_product.get('testimonials'),
1772
+ 'successMetrics': existing_product.get('success_metrics'),
1773
+ 'productVariants': existing_product.get('product_variants'),
1774
+ 'availability': existing_product.get('availability'),
1775
+ 'technicalSpecifications': existing_product.get('technical_specifications'),
1776
+ 'compatibility': existing_product.get('compatibility'),
1777
+ 'supportInfo': existing_product.get('support_info'),
1778
+ 'regulatoryCompliance': existing_product.get('regulatory_compliance'),
1779
+ 'localization': existing_product.get('localization'),
1780
+ 'installationRequirements': existing_product.get('installation_requirements'),
1781
+ 'userManualUrl': existing_product.get('user_manual_url'),
1782
+ 'returnPolicy': existing_product.get('return_policy'),
1783
+ 'shippingInfo': existing_product.get('shipping_info')
1784
+ }
1785
+
1786
+ # Merge existing data with updates
1787
+ merged_data = converted_existing.copy()
1788
+ merged_data.update(product_data)
1789
+ merged_data['product_id'] = product_id
1790
+
1791
+ # Use save_product with merged data
1792
+ updated_id = self.save_product(merged_data)
1793
+ return updated_id == product_id
1794
+
1795
+ except Exception as e:
1796
+ self.logger.error(f"Error updating product {product_id}: {str(e)}")
1797
+ raise
1798
+
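+ # Illustrative sketch (editorial addition): update_product merges a partial
+ # camelCase payload over the existing record and re-saves it, so callers only
+ # pass the fields that change. The product id is the seeded default.
+ #
+ #     ok = dm.update_product(
+ #         "prod-12345678-1234-1234-1234-123456789012",
+ #         {"shortDescription": "AI-powered sales automation with follow-ups"},
+ #     )
+ #     assert ok
+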
1799
+ def save_scoring_criteria(self, org_id: str, criteria: List[Dict[str, Any]]) -> None:
1800
+ """
1801
+ Save scoring criteria for an organization.
1802
+
1803
+ Args:
1804
+ org_id: Organization identifier
1805
+ criteria: List of scoring criteria
1806
+ """
1807
+ try:
1808
+ # Save to configuration file
1809
+ criteria_file = self.config_dir / "scoring_criteria.json"
1810
+
1811
+ # Load existing criteria
1812
+ existing_criteria = {}
1813
+ if criteria_file.exists():
1814
+ with open(criteria_file, 'r') as f:
1815
+ existing_criteria = json.load(f)
1816
+
1817
+ # Update criteria for this org
1818
+ existing_criteria[org_id] = criteria
1819
+
1820
+ # Save back to file
1821
+ with open(criteria_file, 'w') as f:
1822
+ json.dump(existing_criteria, f, indent=2)
1823
+
1824
+ self.logger.debug(f"Saved scoring criteria for org: {org_id}")
1825
+
1826
+ except Exception as e:
1827
+ self.logger.error(f"Failed to save scoring criteria: {str(e)}")
1828
+ raise
1829
+
1830
+ def get_scoring_criteria(self, org_id: str) -> List[Dict[str, Any]]:
1831
+ """
1832
+ Get scoring criteria for an organization.
1833
+
1834
+ Args:
1835
+ org_id: Organization identifier
1836
+
1837
+ Returns:
1838
+ List of scoring criteria
1839
+ """
1840
+ try:
1841
+ criteria_file = self.config_dir / "scoring_criteria.json"
1842
+
1843
+ if criteria_file.exists():
1844
+ with open(criteria_file, 'r') as f:
1845
+ all_criteria = json.load(f)
1846
+ return all_criteria.get(org_id, [])
1847
+
1848
+ return []
1849
+
1850
+ except Exception as e:
1851
+ self.logger.error(f"Failed to get scoring criteria: {str(e)}")
1852
+ return []
1853
+
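+ # Illustrative round trip (editorial addition): criteria live in a single
+ # config/scoring_criteria.json keyed by org_id, so save and get are symmetric.
+ #
+ #     dm.save_scoring_criteria("rta", [{"name": "industry_fit", "weight": 0.15}])
+ #     assert dm.get_scoring_criteria("rta")[0]["name"] == "industry_fit"
+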
1854
+ def _generate_product_id(self) -> str:
1855
+ """Generate unique product ID."""
1856
+ return f"uuid:{uuid.uuid4()}"  # uuid is already imported at module level
1858
+
1859
+ def _initialize_default_data(self):
1860
+ """Initialize default data for llm_worker_plan and gs_company_criteria tables."""
1861
+ try:
1862
+ with sqlite3.connect(self.db_path) as conn:
1863
+ cursor = conn.cursor()
1864
+
1865
+ # Check if llm_worker_plan has data
1866
+ cursor.execute("SELECT COUNT(*) FROM llm_worker_plan")
1867
+ plan_count = cursor.fetchone()[0]
1868
+
1869
+ if plan_count == 0:
1870
+ # Insert default llm_worker_plan record
1871
+ default_plan = {
1872
+ 'id': '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',
1873
+ 'name': 'FuseSell AI (v1.025)',
1874
+ 'description': 'Default FuseSell AI plan for local development',
1875
+ 'org_id': 'rta',
1876
+ 'status': 'published',
1877
+ 'executors': json.dumps([
1878
+ {
1879
+ 'llm_worker_executor_id': {
1880
+ 'name': 'gs_161_data_acquisition',
1881
+ 'display_name': 'Data Acquisition'
1882
+ }
1883
+ },
1884
+ {
1885
+ 'llm_worker_executor_id': {
1886
+ 'name': 'gs_161_data_preparation',
1887
+ 'display_name': 'Data Preparation'
1888
+ }
1889
+ },
1890
+ {
1891
+ 'llm_worker_executor_id': {
1892
+ 'name': 'gs_161_lead_scoring',
1893
+ 'display_name': 'Lead Scoring'
1894
+ }
1895
+ },
1896
+ {
1897
+ 'llm_worker_executor_id': {
1898
+ 'name': 'gs_162_initial_outreach',
1899
+ 'display_name': 'Initial Outreach'
1900
+ }
1901
+ },
1902
+ {
1903
+ 'llm_worker_executor_id': {
1904
+ 'name': 'gs_162_follow_up',
1905
+ 'display_name': 'Follow Up'
1906
+ }
1907
+ }
1908
+ ]),
1909
+ 'settings': json.dumps({}),
1910
+ 'date_created': datetime.now().isoformat(),
1911
+ 'user_created': 'system'
1912
+ }
1913
+
1914
+ cursor.execute("""
1915
+ INSERT INTO llm_worker_plan
1916
+ (id, name, description, org_id, status, executors, settings,
1917
+ date_created, user_created)
1918
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
1919
+ """, (
1920
+ default_plan['id'],
1921
+ default_plan['name'],
1922
+ default_plan['description'],
1923
+ default_plan['org_id'],
1924
+ default_plan['status'],
1925
+ default_plan['executors'],
1926
+ default_plan['settings'],
1927
+ default_plan['date_created'],
1928
+ default_plan['user_created']
1929
+ ))
1930
+
1931
+ self.logger.debug(
1932
+ "Initialized default llm_worker_plan data")
1933
+
1934
+ # Check if gs_company_criteria has data
1935
+ cursor.execute("SELECT COUNT(*) FROM gs_company_criteria")
1936
+ criteria_count = cursor.fetchone()[0]
1937
+
1938
+ if criteria_count == 0:
1939
+ # Insert default gs_company_criteria records (based on fetched data)
1940
+ default_criteria = [
1941
+ {
1942
+ 'id': 'criteria_industry_fit',
1943
+ 'name': 'industry_fit',
1944
+ 'definition': 'How well the customer\'s industry aligns with the product\'s target market',
1945
+ 'weight': 0.15,
1946
+ 'guidelines': json.dumps({
1947
+ 'low': {'range': [0, 49], 'description': "Industries with minimal overlap or relevance to product capabilities"},
1948
+ 'medium': {'range': [50, 79], 'description': 'Industries with potential for product adoption but limited case studies'},
1949
+ 'high': {'range': [80, 100], 'description': 'Industries where product has proven success (e.g., IT services, software development, project management firms)'}
1950
+ }),
1951
+ 'scoring_factors': json.dumps([
1952
+ 'Perfect industry match: 80-100',
1953
+ 'Related industry: 60-79',
1954
+ 'Adjacent industry: 40-59',
1955
+ 'Unrelated industry: 0-39'
1956
+ ]),
1957
+ 'org_id': 'rta',
1958
+ 'status': 'published',
1959
+ 'date_created': datetime.now().isoformat(),
1960
+ 'user_created': 'system'
1961
+ },
1962
+ {
1963
+ 'id': 'criteria_company_size',
1964
+ 'name': 'company_size',
1965
+ 'definition': 'Company size alignment with product\'s ideal customer profile',
1966
+ 'weight': 0.15,
1967
+ 'guidelines': json.dumps({
1968
+ 'low': {'range': [0, 49], 'description': 'Companies below 20 or above 1000 employees, or outside the specified revenue ranges'},
1969
+ 'medium': {'range': [50, 79], 'description': 'Companies with 20-49 or 501-1000 employees, $1M-$4.9M or $50.1M-$100M revenue'},
1970
+ 'high': {'range': [80, 100], 'description': 'Companies with 50-500 employees and $5M-$50M annual revenue'}
1971
+ }),
1972
+ 'scoring_factors': json.dumps([
1973
+ 'Ideal size range: 80-100',
1974
+ 'Close to ideal: 60-79',
1975
+ 'Acceptable size: 40-59',
1976
+ 'Poor size fit: 0-39'
1977
+ ]),
1978
+ 'org_id': 'rta',
1979
+ 'status': 'published',
1980
+ 'date_created': datetime.now().isoformat(),
1981
+ 'user_created': 'system'
1982
+ },
1983
+ {
1984
+ 'id': 'criteria_pain_points',
1985
+ 'name': 'pain_points',
1986
+ 'definition': 'How well the product addresses customer\'s identified pain points',
1987
+ 'weight': 0.3,
1988
+ 'guidelines': json.dumps({
1989
+ 'low': {'range': [0, 49], 'description': "Few or no relevant pain points, or challenges outside product's primary focus"},
1990
+ 'medium': {'range': [50, 79], 'description': 'Some relevant pain points addressed, with potential for significant impact'},
1991
+ 'high': {'range': [80, 100], 'description': "Multiple critical pain points directly addressed by product's core features"}
1992
+ }),
1993
+ 'scoring_factors': json.dumps([
1994
+ 'Addresses all major pain points: 80-100',
1995
+ 'Addresses most pain points: 60-79',
1996
+ 'Addresses some pain points: 40-59',
1997
+ 'Addresses few/no pain points: 0-39'
1998
+ ]),
1999
+ 'org_id': 'rta',
2000
+ 'status': 'published',
2001
+ 'date_created': datetime.now().isoformat(),
2002
+ 'user_created': 'system'
2003
+ },
2004
+ {
2005
+ 'id': 'criteria_product_fit',
2006
+ 'name': 'product_fit',
2007
+ 'definition': 'Overall product-customer compatibility',
2008
+ 'weight': 0.2,
2009
+ 'guidelines': json.dumps({
2010
+ 'low': {'range': [0, 49], 'description': "Significant gaps between product's capabilities and the prospect's needs, or extensive customization required"},
2011
+ 'medium': {'range': [50, 79], 'description': 'Product addresses most key needs, some customization or additional features may be necessary'},
2012
+ 'high': {'range': [80, 100], 'description': "Product's features closely match the prospect's primary needs with minimal customization required"}
2013
+ }),
2014
+ 'scoring_factors': json.dumps([
2015
+ 'Excellent feature match: 80-100',
2016
+ 'Good feature match: 60-79',
2017
+ 'Basic feature match: 40-59',
2018
+ 'Poor feature match: 0-39'
2019
+ ]),
2020
+ 'org_id': 'rta',
2021
+ 'status': 'published',
2022
+ 'date_created': datetime.now().isoformat(),
2023
+ 'user_created': 'system'
2024
+ },
2025
+ {
2026
+ 'id': 'criteria_geographic_fit',
2027
+ 'name': 'geographic_market_fit',
2028
+ 'definition': 'Geographic alignment between customer location and product availability',
2029
+ 'weight': 0.2,
2030
+ 'guidelines': json.dumps({
2031
+ 'low': {'range': [0, 30], 'description': "Customer location is outside of the product's designated target markets"},
2032
+ 'medium': {'range': [31, 70], 'description': "Customer location is in regions adjacent to or with strong ties to the product's primary markets"},
2033
+ 'high': {'range': [71, 100], 'description': "Customer location is within the product's primary target markets"}
2034
+ }),
2035
+ 'scoring_factors': json.dumps([
2036
+ 'Strong market presence: 80-100',
2037
+ 'Moderate presence: 60-79',
2038
+ 'Limited presence: 40-59',
2039
+ 'No market presence: 0-39'
2040
+ ]),
2041
+ 'org_id': 'rta',
2042
+ 'status': 'published',
2043
+ 'date_created': datetime.now().isoformat(),
2044
+ 'user_created': 'system'
2045
+ }
2046
+ ]
2047
+
2048
+ for criteria in default_criteria:
2049
+ cursor.execute("""
2050
+ INSERT INTO gs_company_criteria
2051
+ (id, name, definition, weight, guidelines, scoring_factors, org_id, status,
2052
+ date_created, user_created)
2053
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
2054
+ """, (
2055
+ criteria['id'],
2056
+ criteria['name'],
2057
+ criteria['definition'],
2058
+ criteria['weight'],
2059
+ criteria['guidelines'],
2060
+ criteria['scoring_factors'],
2061
+ criteria['org_id'],
2062
+ criteria['status'],
2063
+ criteria['date_created'],
2064
+ criteria['user_created']
2065
+ ))
2066
+
2067
+ self.logger.debug(
2068
+ f"Initialized {len(default_criteria)} default gs_company_criteria records")
2069
+
2070
+ # Initialize default products if none exist
2071
+ cursor.execute(
2072
+ "SELECT COUNT(*) FROM products WHERE org_id = 'rta'")
2073
+ product_count = cursor.fetchone()[0]
2074
+
2075
+ if product_count == 0:
2076
+ default_products = [
2077
+ {
2078
+ 'product_id': 'prod-12345678-1234-1234-1234-123456789012',
2079
+ 'org_id': 'rta',
2080
+ 'org_name': 'RTA',
2081
+ 'project_code': 'FUSESELL',
2082
+ 'product_name': 'FuseSell AI Pro',
2083
+ 'short_description': 'AI-powered sales automation platform',
2084
+ 'long_description': 'Comprehensive sales automation solution with AI-driven lead scoring, email generation, and customer analysis capabilities',
2085
+ 'category': 'Sales Automation',
2086
+ 'subcategory': 'AI-Powered CRM',
2087
+ 'target_users': json.dumps(['Sales teams', 'Marketing professionals', 'Business development managers']),
2088
+ 'key_features': json.dumps(['AI lead scoring', 'Automated email generation', 'Customer data analysis', 'Pipeline management']),
2089
+ 'pain_points_solved': json.dumps(['Manual lead qualification', 'Inconsistent email outreach', 'Poor lead prioritization']),
2090
+ 'competitive_advantages': json.dumps(['Advanced AI algorithms', 'Local data processing', 'Customizable workflows']),
2091
+ 'localization': json.dumps(['North America', 'Europe', 'Asia-Pacific', 'Vietnam']),
2092
+ 'market_insights': json.dumps({'targetIndustries': ['Technology', 'SaaS', 'Professional Services'], 'idealCompanySize': '50-500 employees'}),
2093
+ 'status': 'active'
2094
+ },
2095
+ {
2096
+ 'product_id': 'prod-87654321-4321-4321-4321-210987654321',
2097
+ 'org_id': 'rta',
2098
+ 'org_name': 'RTA',
2099
+ 'project_code': 'FUSESELL',
2100
+ 'product_name': 'FuseSell Starter',
2101
+ 'short_description': 'Entry-level sales automation tool',
2102
+ 'long_description': 'Basic sales automation features for small teams getting started with sales technology',
2103
+ 'category': 'Sales Automation',
2104
+ 'subcategory': 'Basic CRM',
2105
+ 'target_users': json.dumps(['Small sales teams', 'Startups', 'Solo entrepreneurs']),
2106
+ 'key_features': json.dumps(['Contact management', 'Email templates', 'Basic reporting', 'Lead tracking']),
2107
+ 'pain_points_solved': json.dumps(['Manual contact management', 'Basic email automation needs']),
2108
+ 'competitive_advantages': json.dumps(['Easy to use', 'Affordable pricing', 'Quick setup']),
2109
+ 'localization': json.dumps(['Global']),
2110
+ 'market_insights': json.dumps({'targetIndustries': ['All industries'], 'idealCompanySize': '1-50 employees'}),
2111
+ 'status': 'active'
2112
+ }
2113
+ ]
2114
+
2115
+ for product in default_products:
2116
+ cursor.execute("""
2117
+ INSERT INTO products
2118
+ (product_id, org_id, org_name, project_code, product_name, short_description,
2119
+ long_description, category, subcategory, target_users, key_features,
2120
+ pain_points_solved, competitive_advantages, localization, market_insights, status)
2121
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
2122
+ """, (
2123
+ product['product_id'], product['org_id'], product['org_name'], product['project_code'],
2124
+ product['product_name'], product['short_description'], product['long_description'],
2125
+ product['category'], product['subcategory'], product['target_users'], product['key_features'],
2126
+ product['pain_points_solved'], product['competitive_advantages'], product['localization'],
2127
+ product['market_insights'], product['status']
2128
+ ))
2129
+
2130
+ self.logger.debug(
2131
+ f"Initialized {len(default_products)} default products")
2132
+
2133
+ # Initialize default team settings if none exist
2134
+ cursor.execute(
2135
+ "SELECT COUNT(*) FROM team_settings WHERE org_id = 'rta'")
2136
+ team_count = cursor.fetchone()[0]
2137
+
2138
+ if team_count == 0:
2139
+ default_team_settings = {
2140
+ 'id': 'team_rta_default_settings',
2141
+ 'team_id': 'team_rta_default',
2142
+ 'org_id': 'rta',
2143
+ 'plan_id': '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832',
2144
+ 'plan_name': 'FuseSell AI (v1.025)',
2145
+ 'project_code': 'FUSESELL',
2146
+ 'team_name': 'RTA Default Team',
2147
+ 'organization_settings': json.dumps({
2148
+ 'name': 'RTA',
2149
+ 'industry': 'Technology',
2150
+ 'website': 'https://rta.vn'
2151
+ }),
2152
+ 'sales_rep_settings': json.dumps([{
2153
+ 'name': 'Sales Team',
2154
+ 'email': 'sales@rta.vn',
2155
+ 'position': 'Sales Representative',
2156
+ 'is_primary': True
2157
+ }]),
2158
+ 'product_settings': json.dumps([
2159
+ {'product_id': 'prod-12345678-1234-1234-1234-123456789012',
2160
+ 'enabled': True, 'priority': 1},
2161
+ {'product_id': 'prod-87654321-4321-4321-4321-210987654321',
2162
+ 'enabled': True, 'priority': 2}
2163
+ ]),
2164
+ 'schedule_time_settings': json.dumps({
2165
+ 'business_hours_start': '08:00',
2166
+ 'business_hours_end': '20:00',
2167
+ 'default_delay_hours': 2,
2168
+ 'respect_weekends': True
2169
+ }),
2170
+ 'initial_outreach_settings': json.dumps({
2171
+ 'default_tone': 'professional',
2172
+ 'approaches': ['professional_direct', 'consultative', 'industry_expert', 'relationship_building'],
2173
+ 'subject_line_variations': 4
2174
+ }),
2175
+ 'follow_up_settings': json.dumps({
2176
+ 'max_follow_ups': 5,
2177
+ 'default_interval_days': 3,
2178
+ 'strategies': ['gentle_reminder', 'value_add', 'alternative_approach', 'final_attempt', 'graceful_farewell']
2179
+ })
2180
+ }
2181
+
2182
+ cursor.execute("""
2183
+ INSERT INTO team_settings
2184
+ (id, team_id, org_id, plan_id, plan_name, project_code, team_name,
2185
+ organization_settings, sales_rep_settings, product_settings,
2186
+ schedule_time_settings, initial_outreach_settings, follow_up_settings)
2187
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
2188
+ """, (
2189
+ default_team_settings['id'],
2190
+ default_team_settings['team_id'],
2191
+ default_team_settings['org_id'],
2192
+ default_team_settings['plan_id'],
2193
+ default_team_settings['plan_name'],
2194
+ default_team_settings['project_code'],
2195
+ default_team_settings['team_name'],
2196
+ default_team_settings['organization_settings'],
2197
+ default_team_settings['sales_rep_settings'],
2198
+ default_team_settings['product_settings'],
2199
+ default_team_settings['schedule_time_settings'],
2200
+ default_team_settings['initial_outreach_settings'],
2201
+ default_team_settings['follow_up_settings']
2202
+ ))
2203
+
2204
+ self.logger.debug("Initialized default team settings")
2205
+
2206
+ conn.commit()
2207
+
2208
+ except Exception as e:
2209
+ self.logger.warning(f"Failed to initialize default data: {str(e)}")
2210
+ # Don't raise exception - this is not critical for basic functionality
2211
+
2212
+ def get_gs_company_criteria(self, org_id: str) -> List[Dict[str, Any]]:
2213
+ """
2214
+ Get scoring criteria from gs_company_criteria table (server schema).
2215
+
2216
+ Args:
2217
+ org_id: Organization identifier
2218
+
2219
+ Returns:
2220
+ List of scoring criteria from gs_company_criteria table
2221
+ """
2222
+ try:
2223
+ with sqlite3.connect(self.db_path) as conn:
2224
+ conn.row_factory = sqlite3.Row
2225
+ cursor = conn.cursor()
2226
+ cursor.execute("""
2227
+ SELECT * FROM gs_company_criteria
2228
+ WHERE org_id = ? AND status = 'published'
2229
+ ORDER BY name
2230
+ """, (org_id,))
2231
+
2232
+ criteria = []
2233
+ for row in cursor.fetchall():
2234
+ criterion = dict(row)
2235
+
2236
+ # Parse JSON fields
2237
+ if criterion['guidelines']:
2238
+ try:
2239
+ criterion['guidelines'] = json.loads(
2240
+ criterion['guidelines'])
2241
+ except json.JSONDecodeError:
2242
+ pass
2243
+
2244
+ if criterion['scoring_factors']:
2245
+ try:
2246
+ criterion['scoring_factors'] = json.loads(
2247
+ criterion['scoring_factors'])
2248
+ except json.JSONDecodeError:
2249
+ pass
2250
+
2251
+ criteria.append(criterion)
2252
+
2253
+ return criteria
2254
+
2255
+ except Exception as e:
2256
+ self.logger.error(f"Failed to get gs_company_criteria: {str(e)}")
2257
+ return []
2258
+
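+ # Illustrative sketch (editorial addition): the seeded criteria weights sum to
+ # 1.0 (0.15 + 0.15 + 0.3 + 0.2 + 0.2), so a weighted lead score can be computed
+ # directly from this table. `raw_scores` below is a hypothetical per-criterion
+ # result keyed by criterion name.
+ #
+ #     criteria = dm.get_gs_company_criteria("rta")
+ #     raw_scores = {"industry_fit": 85, "company_size": 70, "pain_points": 90,
+ #                   "product_fit": 80, "geographic_market_fit": 60}
+ #     total = sum(c["weight"] * raw_scores.get(c["name"], 0) for c in criteria)
+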
2259
+ def get_llm_worker_plan(self, plan_id: str) -> Optional[Dict[str, Any]]:
2260
+ """
2261
+ Get llm_worker_plan data by plan ID.
2262
+
2263
+ Args:
2264
+ plan_id: Plan identifier
2265
+
2266
+ Returns:
2267
+ Plan data dictionary or None if not found
2268
+ """
2269
+ try:
2270
+ with sqlite3.connect(self.db_path) as conn:
2271
+ conn.row_factory = sqlite3.Row
2272
+ cursor = conn.cursor()
2273
+ cursor.execute(
2274
+ "SELECT * FROM llm_worker_plan WHERE id = ?", (plan_id,))
2275
+ row = cursor.fetchone()
2276
+
2277
+ if row:
2278
+ result = dict(row)
2279
+
2280
+ # Parse JSON fields
2281
+ if result['executors']:
2282
+ try:
2283
+ result['executors'] = json.loads(
2284
+ result['executors'])
2285
+ except json.JSONDecodeError:
2286
+ result['executors'] = []
2287
+
2288
+ if result['settings']:
2289
+ try:
2290
+ result['settings'] = json.loads(result['settings'])
2291
+ except json.JSONDecodeError:
2292
+ result['settings'] = {}
2293
+
2294
+ return result
2295
+ return None
2296
+
2297
+ except Exception as e:
2298
+ self.logger.error(f"Failed to get llm_worker_plan: {str(e)}")
2299
+ return None
2300
+
2301
+ # ===== TASK MANAGEMENT METHODS (Correct Schema Implementation) =====
2302
+
2303
+ def save_task(
2304
+ self,
2305
+ task_id: str,
2306
+ plan_id: str,
2307
+ org_id: str,
2308
+ status: str = "running",
2309
+ messages: Optional[List[str]] = None,
2310
+ request_body: Optional[Dict[str, Any]] = None
2311
+ ) -> None:
2312
+ """
2313
+ Save sales process task (equivalent to llm_worker_task).
2314
+
2315
+ Args:
2316
+ task_id: Unique task identifier (sales process ID)
2317
+ plan_id: Plan identifier
2318
+ org_id: Organization identifier
2319
+ status: Task status (running, completed, failed)
2320
+ messages: Optional messages for the task
2321
+ request_body: Initial request data for the sales process
2322
+ """
2323
+ try:
2324
+ with sqlite3.connect(self.db_path) as conn:
2325
+ cursor = conn.cursor()
2326
+ cursor.execute("""
2327
+ INSERT OR REPLACE INTO llm_worker_task
2328
+ (task_id, plan_id, org_id, status, current_runtime_index, messages, request_body)
2329
+ VALUES (?, ?, ?, ?, ?, ?, ?)
2330
+ """, (
2331
+ task_id, plan_id, org_id, status, 0,
2332
+ json.dumps(messages) if messages else None,
2333
+ json.dumps(request_body) if request_body else None
2334
+ ))
2335
+ conn.commit()
2336
+ self.logger.debug(f"Saved task: {task_id}")
2337
+
2338
+ except Exception as e:
2339
+ self.logger.error(f"Failed to save task: {str(e)}")
2340
+ raise
2341
+
2342
+ def save_operation(
2382
+ self,
2383
+ operation_id: str,
2384
+ task_id: str,
2385
+ executor_id: str,
2386
+ chain_order: int,
2387
+ chain_index: int,
2388
+ runtime_index: int,
2389
+ item_index: int,
2390
+ execution_status: str,
2391
+ input_data: Optional[Dict[str, Any]] = None,
2392
+ output_data: Optional[Dict[str, Any]] = None,
2393
+ payload: Optional[Dict[str, Any]] = None,
2394
+ user_messages: Optional[List[str]] = None
2395
+ ) -> None:
2396
+ """
2397
+ Save stage operation execution (equivalent to llm_worker_operation).
2398
+
2399
+ Args:
2400
+ operation_id: Unique operation identifier
2401
+ task_id: Parent task identifier
2402
+ executor_id: Stage executor identifier (e.g., 'data_acquisition')
2403
+ chain_order: Order in the execution chain
2404
+ chain_index: Chain index
2405
+ runtime_index: Runtime index (stage number)
2406
+ item_index: Item index
2407
+ execution_status: Operation status (running, done, failed)
2408
+ input_data: Input data for the operation
2409
+ output_data: Output data from the operation
2410
+ payload: Additional payload data
2411
+ user_messages: User messages for the operation
2412
+ """
2413
+ try:
2414
+ with sqlite3.connect(self.db_path) as conn:
2415
+ cursor = conn.cursor()
2416
+ cursor.execute("""
2417
+ INSERT OR REPLACE INTO llm_worker_operation
2418
+ (operation_id, task_id, executor_id, chain_order, chain_index,
2419
+ runtime_index, item_index, execution_status, input_data,
2420
+ output_data, payload, user_messages)
2421
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
2422
+ """, (
2423
+ operation_id, task_id, executor_id, chain_order, chain_index,
2424
+ runtime_index, item_index, execution_status,
2425
+ json.dumps(input_data) if input_data else None,
2426
+ json.dumps(output_data) if output_data else None,
2427
+ json.dumps(payload) if payload else None,
2428
+ json.dumps(user_messages) if user_messages else None
2429
+ ))
2430
+ conn.commit()
2431
+ self.logger.debug(f"Saved operation: {operation_id}")
2432
+
2433
+ except Exception as e:
2434
+ self.logger.error(f"Failed to save operation: {str(e)}")
2435
+ raise
2436
+
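+ # Illustrative sketch (editorial addition): a task row represents the sales
+ # process, and each stage execution is an operation row keyed back to it. The
+ # task and operation ids below are hypothetical; the plan id is the seeded default.
+ #
+ #     dm.save_task("task-001", "569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832", "rta",
+ #                  request_body={"customer_info": "Acme Corp"})
+ #     dm.save_operation("task-001_gs_161_data_acquisition_0", "task-001",
+ #                       "gs_161_data_acquisition", chain_order=0, chain_index=0,
+ #                       runtime_index=0, item_index=0, execution_status="running")
+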
2437
+ def get_task_operations(self, task_id: str) -> List[Dict[str, Any]]:
2438
+ """
2439
+ Get all operations for a specific task.
2440
+
2441
+ Args:
2442
+ task_id: Task identifier
2443
+
2444
+ Returns:
2445
+ List of operation records
2446
+ """
2447
+ try:
2448
+ with sqlite3.connect(self.db_path) as conn:
2449
+ cursor = conn.cursor()
2450
+ cursor.execute("""
2451
+ SELECT * FROM llm_worker_operation
2452
+ WHERE task_id = ?
2453
+ ORDER BY runtime_index, chain_order
2454
+ """, (task_id,))
2455
+
2456
+ columns = [description[0]
2457
+ for description in cursor.description]
2458
+ operations = []
2459
+
2460
+ for row in cursor.fetchall():
2461
+ operation = dict(zip(columns, row))
2462
+ # Parse JSON fields
2463
+ for field in ['input_data', 'output_data', 'payload', 'user_messages']:
2464
+ if operation[field]:
2465
+ try:
2466
+ operation[field] = json.loads(operation[field])
2467
+ except json.JSONDecodeError:
2468
+ pass
2469
+ operations.append(operation)
2470
+
2471
+ return operations
2472
+
2473
+ except Exception as e:
2474
+ self.logger.error(f"Failed to get task operations: {str(e)}")
2475
+ return []
2476
+
2477
+ def get_task_by_id(self, task_id: str) -> Optional[Dict[str, Any]]:
2478
+ """
2479
+ Get task by ID.
2480
+
2481
+ Args:
2482
+ task_id: Task identifier
2483
+
2484
+ Returns:
2485
+ Task record or None if not found
2486
+ """
2487
+ try:
2488
+ with sqlite3.connect(self.db_path) as conn:
2489
+ cursor = conn.cursor()
2490
+ cursor.execute(
2491
+ "SELECT * FROM llm_worker_task WHERE task_id = ?", (task_id,))
2492
+
2493
+ row = cursor.fetchone()
2494
+ if row:
2495
+ columns = [description[0]
2496
+ for description in cursor.description]
2497
+ task = dict(zip(columns, row))
2498
+
2499
+ # Parse JSON fields
2500
+ for field in ['messages', 'request_body']:
2501
+ if task[field]:
2502
+ try:
2503
+ task[field] = json.loads(task[field])
2504
+ except json.JSONDecodeError:
2505
+ pass
2506
+
2507
+ return task
2508
+
2509
+ return None
2510
+
2511
+ except Exception as e:
2512
+ self.logger.error(f"Failed to get task: {str(e)}")
2513
+ return None
2514
+
2515
+ def list_tasks(
2516
+ self,
2517
+ org_id: Optional[str] = None,
2518
+ status: Optional[str] = None,
2519
+ limit: int = 50
2520
+ ) -> List[Dict[str, Any]]:
2521
+ """
2522
+ List tasks with optional filtering.
2523
+
2524
+ Args:
2525
+ org_id: Optional organization filter
2526
+ status: Optional status filter
2527
+ limit: Maximum number of results
2528
+
2529
+ Returns:
2530
+ List of task records
2531
+ """
2532
+ try:
2533
+ with sqlite3.connect(self.db_path) as conn:
2534
+ cursor = conn.cursor()
2535
+
2536
+ query = "SELECT * FROM llm_worker_task"
2537
+ params = []
2538
+ conditions = []
2539
+
2540
+ if org_id:
2541
+ conditions.append("org_id = ?")
2542
+ params.append(org_id)
2543
+
2544
+ if status:
2545
+ conditions.append("status = ?")
2546
+ params.append(status)
2547
+
2548
+ if conditions:
2549
+ query += " WHERE " + " AND ".join(conditions)
2550
+
2551
+ query += " ORDER BY created_at DESC LIMIT ?"
2552
+ params.append(limit)
2553
+
2554
+ cursor.execute(query, params)
2555
+
2556
+ columns = [description[0]
2557
+ for description in cursor.description]
2558
+ tasks = []
2559
+
2560
+ for row in cursor.fetchall():
2561
+ task = dict(zip(columns, row))
2562
+ # Parse JSON fields
2563
+ for field in ['messages', 'request_body']:
2564
+ if task[field]:
2565
+ try:
2566
+ task[field] = json.loads(task[field])
2567
+ except json.JSONDecodeError:
2568
+ pass
2569
+ tasks.append(task)
2570
+
2571
+ return tasks
2572
+
2573
+ except Exception as e:
2574
+ self.logger.error(f"Failed to list tasks: {str(e)}")
2575
+ return []
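+ # Illustrative sketch (editorial addition): both filters are optional and are
+ # ANDed together when present; results are newest first.
+ #
+ #     for t in dm.list_tasks(org_id="rta", status="running", limit=10):
+ #         print(t["task_id"], t["current_runtime_index"])
+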
2576
+ # ===== SALES PROCESS QUERY METHODS =====
2577
+
2578
+ def find_sales_processes_by_customer(self, customer_name: str) -> List[Dict[str, Any]]:
2579
+ """
2580
+ Find all sales processes for a specific customer.
2581
+
2582
+ Args:
2583
+ customer_name: Customer name to search for
2584
+
2585
+ Returns:
2586
+ List of task records matching the customer
2587
+ """
2588
+ try:
2589
+ with sqlite3.connect(self.db_path) as conn:
2590
+ cursor = conn.cursor()
2591
+ cursor.execute("""
2592
+ SELECT t.*,
2593
+ json_extract(t.request_body, '$.customer_info') as customer_info,
2594
+ json_extract(t.request_body, '$.org_name') as org_name
2595
+ FROM llm_worker_task t
2596
+ WHERE json_extract(t.request_body, '$.customer_info') LIKE ?
2597
+ ORDER BY t.created_at DESC
2598
+ """, (f'%{customer_name}%',))
2599
+
2600
+ columns = [description[0]
2601
+ for description in cursor.description]
2602
+ processes = []
2603
+
2604
+ for row in cursor.fetchall():
2605
+ process = dict(zip(columns, row))
2606
+ # Parse JSON fields
2607
+ for field in ['messages', 'request_body']:
2608
+ if process[field]:
2609
+ try:
2610
+ process[field] = json.loads(process[field])
2611
+ except json.JSONDecodeError:
2612
+ pass
2613
+ processes.append(process)
2614
+
2615
+ return processes
2616
+
2617
+ except Exception as e:
2618
+ self.logger.error(
2619
+ f"Failed to find sales processes by customer: {str(e)}")
2620
+ return []
2621
+
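+ # Illustrative sketch (editorial addition): the LIKE match is a substring
+ # search over the JSON-encoded customer_info field, so partial names work.
+ #
+ #     for proc in dm.find_sales_processes_by_customer("Acme"):
+ #         print(proc["task_id"], proc["status"])
+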
2622
+ def get_sales_process_stages(self, task_id: str) -> List[Dict[str, Any]]:
2623
+ """
2624
+ Get all stage executions for a specific sales process.
2625
+
2626
+ Args:
2627
+ task_id: Sales process (task) identifier
2628
+
2629
+ Returns:
2630
+ List of operation records for the sales process
2631
+ """
2632
+ try:
2633
+ with sqlite3.connect(self.db_path) as conn:
2634
+ cursor = conn.cursor()
2635
+ cursor.execute("""
2636
+ SELECT
2637
+ operation_id,
2638
+ executor_id,
2639
+ runtime_index,
2640
+ execution_status,
2641
+ input_data,
2642
+ output_data,
2643
+ created_at,
2644
+ updated_at
2645
+ FROM llm_worker_operation
2646
+ WHERE task_id = ?
2647
+ ORDER BY runtime_index, chain_order
2648
+ """, (task_id,))
2649
+
2650
+ columns = [description[0]
2651
+ for description in cursor.description]
2652
+ stages = []
2653
+
2654
+ for row in cursor.fetchall():
2655
+ stage = dict(zip(columns, row))
2656
+ # Parse JSON fields
2657
+ for field in ['input_data', 'output_data']:
2658
+ if stage[field]:
2659
+ try:
2660
+ stage[field] = json.loads(stage[field])
2661
+ except json.JSONDecodeError:
2662
+ pass
2663
+
2664
+ # Map executor_id to readable stage name
2665
+ executor_mapping = {
2666
+ 'gs_161_data_acquisition': 'Data Acquisition',
2667
+ 'gs_161_data_preparation': 'Data Preparation',
2668
+ 'gs_161_lead_scoring': 'Lead Scoring',
2669
+ 'gs_162_initial_outreach': 'Initial Outreach',
2670
+ 'gs_162_follow_up': 'Follow-up'
2671
+ }
2672
+ stage['stage_name'] = executor_mapping.get(
2673
+ stage['executor_id'], stage['executor_id'])
2674
+
2675
+ stages.append(stage)
2676
+
2677
+ return stages
2678
+
2679
+ except Exception as e:
2680
+ self.logger.error(f"Failed to get sales process stages: {str(e)}")
2681
+ return []
2682
+
2683
+ def get_sales_process_summary(self, task_id: str) -> Optional[Dict[str, Any]]:
2684
+ """
2685
+ Get a complete summary of a sales process including task info and all stages.
2686
+
2687
+ Args:
2688
+ task_id: Sales process (task) identifier
2689
+
2690
+ Returns:
2691
+ Complete sales process summary or None if not found
2692
+ """
2693
+ try:
2694
+ # Get task info
2695
+ task = self.get_task_by_id(task_id)
2696
+ if not task:
2697
+ return None
2698
+
2699
+ # Get all stage operations
2700
+ stages = self.get_sales_process_stages(task_id)
2701
+
2702
+ # Get related data
2703
+ lead_scores = []
2704
+ email_drafts = []
2705
+
2706
+ try:
2707
+ with sqlite3.connect(self.db_path) as conn:
2708
+ cursor = conn.cursor()
2709
+
2710
+ # Get lead scores
2711
+ cursor.execute("""
2712
+ SELECT product_id, score, criteria_breakdown, created_at
2713
+ FROM lead_scores
2714
+ WHERE execution_id = ?
2715
+ """, (task_id,))
2716
+
2717
+ for row in cursor.fetchall():
2718
+ lead_scores.append({
2719
+ 'product_id': row[0],
2720
+ 'score': row[1],
2721
+ 'criteria_breakdown': json.loads(row[2]) if row[2] else {},
2722
+ 'created_at': row[3]
2723
+ })
2724
+
2725
+ # Get email drafts
2726
+ cursor.execute("""
2727
+ SELECT draft_id, subject, content, draft_type, created_at
2728
+ FROM email_drafts
2729
+ WHERE execution_id = ?
2730
+ """, (task_id,))
2731
+
2732
+ for row in cursor.fetchall():
2733
+ email_drafts.append({
2734
+ 'draft_id': row[0],
2735
+ 'subject': row[1],
2736
+ # Truncate content
2737
+ 'content': row[2][:200] + '...' if len(row[2]) > 200 else row[2],
2738
+ 'draft_type': row[3],
2739
+ 'created_at': row[4]
2740
+ })
2741
+
2742
+ except Exception as e:
2743
+ self.logger.warning(
2744
+ f"Failed to get related data for task {task_id}: {str(e)}")
2745
+
2746
+ return {
2747
+ 'task_info': task,
2748
+ 'stages': stages,
2749
+ 'lead_scores': lead_scores,
2750
+ 'email_drafts': email_drafts,
2751
+ 'summary': {
2752
+ 'total_stages': len(stages),
2753
+ 'completed_stages': len([s for s in stages if s['execution_status'] == 'done']),
2754
+ 'failed_stages': len([s for s in stages if s['execution_status'] == 'failed']),
2755
+ 'total_lead_scores': len(lead_scores),
2756
+ 'total_email_drafts': len(email_drafts)
2757
+ }
2758
+ }
2759
+
2760
+ except Exception as e:
2761
+ self.logger.error(f"Failed to get sales process summary: {str(e)}")
2762
+ return None
2763
+
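+ # Illustrative sketch (editorial addition): the summary aggregates the task
+ # row, its stage operations, and any lead scores and email drafts that share
+ # the same execution id. The task id below is hypothetical.
+ #
+ #     summary = dm.get_sales_process_summary("task-001")
+ #     if summary:
+ #         s = summary["summary"]
+ #         print(f"{s['completed_stages']}/{s['total_stages']} stages done")
+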
2764
+ # ===== CUSTOMER DATA PERSISTENCE METHODS =====
2765
+
2766
+ def update_customer_from_profile(self, customer_id: str, profile_data: Dict[str, Any]) -> None:
2767
+ """
2768
+ Update customer record with profile data from data preparation stage.
2769
+
2770
+ Args:
2771
+ customer_id: Customer identifier
2772
+ profile_data: Structured profile data from data preparation
2773
+ """
2774
+ try:
2775
+ with sqlite3.connect(self.db_path) as conn:
2776
+ cursor = conn.cursor()
2777
+
2778
+ # Extract data from profile structure
2779
+ company_info = profile_data.get('companyInfo', {})
2780
+ contact_info = profile_data.get('primaryContact', {})
2781
+
2782
+ cursor.execute("""
2783
+ UPDATE customers
2784
+ SET company_name = ?, website = ?, industry = ?,
2785
+ contact_name = ?, contact_email = ?, contact_phone = ?,
2786
+ address = ?, profile_data = ?, updated_at = CURRENT_TIMESTAMP
2787
+ WHERE customer_id = ?
2788
+ """, (
2789
+ company_info.get('name', ''),
2790
+ company_info.get('website', ''),
2791
+ company_info.get('industry', ''),
2792
+ contact_info.get('name', ''),
2793
+ contact_info.get('email', ''),
2794
+ contact_info.get('phone', ''),
2795
+ company_info.get('address', ''),
2796
+ json.dumps(profile_data),
2797
+ customer_id
2798
+ ))
2799
+
2800
+ conn.commit()
2801
+ self.logger.debug(f"Updated customer profile: {customer_id}")
2802
+
2803
+ except Exception as e:
2804
+ self.logger.error(f"Failed to update customer profile: {str(e)}")
2805
+ raise
2806
+
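+ # Illustrative payload (editorial addition): the expected profile shape, with
+ # the companyInfo and primaryContact blocks read above. All values below are
+ # hypothetical.
+ #
+ #     dm.update_customer_from_profile("cust-001", {
+ #         "companyInfo": {"name": "Acme Corp", "website": "https://acme.example",
+ #                         "industry": "Manufacturing", "address": "Hanoi, VN"},
+ #         "primaryContact": {"name": "Jane Doe", "email": "jane@acme.example",
+ #                            "phone": "+84-000-0000"},
+ #     })
+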
2807
+ def get_customer_task(self, task_id: str, customer_id: str) -> Optional[Dict[str, Any]]:
2808
+ """
2809
+ Get customer task data by task_id and customer_id.
2810
+
2811
+ Args:
2812
+ task_id: Task identifier
2813
+ customer_id: Customer identifier
2814
+
2815
+ Returns:
2816
+ Customer task data or None if not found
2817
+ """
2818
+ try:
2819
+ with sqlite3.connect(self.db_path) as conn:
2820
+ conn.row_factory = sqlite3.Row
2821
+ cursor = conn.cursor()
2822
+
2823
+ cursor.execute("""
2824
+ SELECT * FROM gs_customer_llmtask
2825
+ WHERE task_id = ? AND customer_id = ?
2826
+ """, (task_id, customer_id))
2827
+
2828
+ row = cursor.fetchone()
2829
+ if row:
2830
+ return dict(row)
2831
+ return None
2832
+
2833
+ except Exception as e:
2834
+ self.logger.error(f"Failed to get customer task: {str(e)}")
2835
+ return None
2836
+
2837
+ # ===== SCHEMA MIGRATION METHODS =====
2838
+
2839
+ def backup_existing_schema(self) -> str:
2840
+ """
2841
+ Create backup of existing execution data before migration.
2842
+
2843
+ Returns:
2844
+ Backup file path
2845
+ """
2846
+ try:
2847
+ import shutil  # shutil is only needed here; datetime is already imported at module level
2849
+
2850
+ backup_path = f"{self.db_path}.backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
2851
+ shutil.copy2(self.db_path, backup_path)
2852
+
2853
+ self.logger.info(f"Database backup created: {backup_path}")
2854
+ return backup_path
2855
+
2856
+ except Exception as e:
2857
+ self.logger.error(f"Failed to create backup: {str(e)}")
2858
+ raise
2859
+
2860
+ def migrate_executions_to_tasks(self) -> int:
2861
+ """
2862
+ Migrate existing executions table data to new llm_worker_task table format.
2863
+
2864
+ Returns:
2865
+ Number of records migrated
2866
+ """
2867
+ try:
2868
+ with sqlite3.connect(self.db_path) as conn:
2869
+ cursor = conn.cursor()
2870
+
2871
+ # Check if old executions table exists
2872
+ cursor.execute("""
2873
+ SELECT name FROM sqlite_master
2874
+ WHERE type='table' AND name='executions'
2875
+ """)
2876
+ if not cursor.fetchone():
2877
+ self.logger.info(
2878
+ "No executions table found, skipping migration")
2879
+ return 0
2880
+
2881
+ # Get existing executions
2882
+ cursor.execute("""
2883
+ SELECT execution_id, org_id, org_name, status, started_at,
2884
+ completed_at, config_json
2885
+ FROM executions
2886
+ """)
2887
+ executions = cursor.fetchall()
2888
+
2889
+ migrated_count = 0
2890
+ for execution in executions:
2891
+ execution_id, org_id, org_name, status, started_at, completed_at, config_json = execution
2892
+
2893
+ # Parse config_json to extract request_body
2894
+ request_body = {}
2895
+ if config_json:
2896
+ try:
2897
+ config_data = json.loads(config_json)
2898
+ request_body = {
2899
+ 'org_id': org_id,
2900
+ 'org_name': org_name,
2901
+ 'customer_info': config_data.get('customer_name', ''),
2902
+ 'language': config_data.get('language', 'english'),
2903
+ 'input_website': config_data.get('customer_website', ''),
2904
+ 'execution_id': execution_id
2905
+ }
2906
+ except json.JSONDecodeError:
2907
+ request_body = {
2908
+ 'org_id': org_id, 'org_name': org_name}
2909
+
2910
+ # Map execution status to task status
2911
+ task_status = 'completed' if status == 'completed' else 'failed' if status == 'failed' else 'running'
2912
+
2913
+ # Insert into llm_worker_task table
2914
+ cursor.execute("""
2915
+ INSERT OR REPLACE INTO llm_worker_task
2916
+ (task_id, plan_id, org_id, status, current_runtime_index,
2917
+ messages, request_body, created_at, updated_at)
2918
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
2919
+ """, (
2920
+ execution_id,
2921
+ '569cdcbd-cf6d-4e33-b0b2-d2f6f15a0832', # Default plan ID
2922
+ org_id,
2923
+ task_status,
2924
+ 0, # Default runtime index
2925
+ json.dumps([]), # Empty messages
2926
+ json.dumps(request_body),
2927
+ started_at,
2928
+ completed_at or started_at
2929
+ ))
2930
+
2931
+ migrated_count += 1
2932
+
2933
+ conn.commit()
2934
+ self.logger.info(
2935
+ f"Migrated {migrated_count} executions to llm_worker_task table")
2936
+ return migrated_count
2937
+
2938
+ except Exception as e:
2939
+ self.logger.error(
2940
+ f"Failed to migrate executions to tasks: {str(e)}")
2941
+ raise
2942
+
2943
+ def migrate_stage_results_to_operations(self) -> int:
2944
+ """
2945
+ Migrate existing stage_results table data to new llm_worker_operation table format.
2946
+
2947
+ Returns:
2948
+ Number of records migrated
2949
+ """
2950
+ try:
2951
+ with sqlite3.connect(self.db_path) as conn:
2952
+ cursor = conn.cursor()
2953
+
2954
+ # Check if old stage_results table exists
2955
+ cursor.execute("""
2956
+ SELECT name FROM sqlite_master
2957
+ WHERE type='table' AND name='stage_results'
2958
+ """)
2959
+ if not cursor.fetchone():
2960
+ self.logger.info(
2961
+ "No stage_results table found, skipping migration")
2962
+ return 0
2963
+
2964
+ # Get existing stage results
2965
+ cursor.execute("""
2966
+ SELECT id, execution_id, stage_name, status, input_data,
2967
+ output_data, started_at, completed_at, error_message
2968
+ FROM stage_results
2969
+ ORDER BY execution_id, started_at
2970
+ """)
2971
+ stage_results = cursor.fetchall()
2972
+
2973
+ migrated_count = 0
2974
+ current_execution = None
2975
+ chain_index = 0
2976
+
2977
+ for stage_result in stage_results:
2978
+ (stage_id, execution_id, stage_name, status, input_data,
2979
+ output_data, started_at, completed_at, error_message) = stage_result
2980
+
2981
+ # Reset chain_index for new execution
2982
+ if current_execution != execution_id:
2983
+ current_execution = execution_id
2984
+ chain_index = 0
2985
+
2986
+ # Parse JSON data
2987
+ input_json = {}
2988
+ output_json = {}
2989
+
2990
+ if input_data:
2991
+ try:
2992
+ input_json = json.loads(input_data) if isinstance(
2993
+ input_data, str) else input_data
2994
+ except (json.JSONDecodeError, TypeError):
2995
+ input_json = {'raw_input': str(input_data)}
2996
+
2997
+ if output_data:
2998
+ try:
2999
+ output_json = json.loads(output_data) if isinstance(
3000
+ output_data, str) else output_data
3001
+ except (json.JSONDecodeError, TypeError):
3002
+ output_json = {'raw_output': str(output_data)}
3003
+
3004
+ # Add error message to output if failed
3005
+ if status == 'failed' and error_message:
3006
+ output_json['error'] = error_message
3007
+
3008
+ # Map stage status to execution status
3009
+ execution_status = 'done' if status == 'success' else 'failed' if status == 'failed' else 'running'
3010
+
3011
+ # Generate operation ID
3012
+ operation_id = f"{execution_id}_{stage_name}_{chain_index}"
3013
+
3014
+ # Insert into llm_worker_operation table
3015
+ cursor.execute("""
3016
+ INSERT OR REPLACE INTO llm_worker_operation
3017
+ (operation_id, task_id, executor_id, runtime_index,
+ chain_order, chain_index, execution_status, input_data,
+ output_data, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ """, (
+ operation_id,
+ execution_id,
+ stage_name,
+ 0,  # Default runtime index
+ chain_index,  # chain_order mirrors the original stage order
+ chain_index,
+ execution_status,
+ json.dumps(input_json),
+ json.dumps(output_json),
+ started_at,
+ completed_at or started_at
3032
+ ))
3033
+
3034
+ chain_index += 1
3035
+ migrated_count += 1
3036
+
3037
+ conn.commit()
3038
+ self.logger.info(
3039
+ f"Migrated {migrated_count} stage results to llm_worker_operation table")
3040
+ return migrated_count
3041
+
3042
+ except Exception as e:
3043
+ self.logger.error(
3044
+ f"Failed to migrate stage results to operations: {str(e)}")
3045
+ raise
3046
+
3047
+ def validate_migration(self) -> bool:
3048
+ """
3049
+ Validate that the migration succeeded by checking table presence, record counts, JSON integrity, and foreign-key links.
3050
+
3051
+ Returns:
3052
+ True if migration is valid, False otherwise
3053
+ """
3054
+ try:
3055
+ with sqlite3.connect(self.db_path) as conn:
3056
+ cursor = conn.cursor()
3057
+
3058
+ validation_errors = []
3059
+
3060
+ # Check if new tables exist
3061
+ required_tables = ['llm_worker_task', 'llm_worker_operation']
3062
+ for table in required_tables:
3063
+ cursor.execute("""
3064
+ SELECT name FROM sqlite_master
3065
+ WHERE type='table' AND name=?
3066
+ """, (table,))
3067
+ if not cursor.fetchone():
3068
+ validation_errors.append(
3069
+ f"Required table '{table}' not found")
3070
+
3071
+ # Check if old tables still exist (for rollback capability)
3072
+ legacy_tables = ['executions', 'stage_results']
3073
+ for table in legacy_tables:
3074
+ cursor.execute("""
3075
+ SELECT name FROM sqlite_master
3076
+ WHERE type='table' AND name=?
3077
+ """, (table,))
3078
+ if not cursor.fetchone():
3079
+ validation_errors.append(
3080
+ f"Legacy table '{table}' not found for rollback")
3081
+
3082
+ # Validate data counts match
3083
+ cursor.execute("SELECT COUNT(*) FROM executions")
3084
+ old_execution_count = cursor.fetchone()[0]
3085
+
3086
+ cursor.execute("SELECT COUNT(*) FROM llm_worker_task")
3087
+ new_task_count = cursor.fetchone()[0]
3088
+
3089
+ if old_execution_count != new_task_count:
3090
+ validation_errors.append(
3091
+ f"Execution count mismatch: {old_execution_count} executions vs {new_task_count} tasks"
3092
+ )
3093
+
3094
+ cursor.execute("SELECT COUNT(*) FROM stage_results")
3095
+ old_stage_count = cursor.fetchone()[0]
3096
+
3097
+ cursor.execute("SELECT COUNT(*) FROM llm_worker_operation")
3098
+ new_operation_count = cursor.fetchone()[0]
3099
+
3100
+ if old_stage_count != new_operation_count:
3101
+ validation_errors.append(
3102
+ f"Stage count mismatch: {old_stage_count} stage_results vs {new_operation_count} operations"
3103
+ )
3104
+
3105
+ # Validate JSON data integrity
3106
+ cursor.execute(
3107
+ "SELECT operation_id, input_data, output_data FROM llm_worker_operation LIMIT 10")
3108
+ for operation_id, input_data, output_data in cursor.fetchall():
3109
+ try:
3110
+ if input_data:
3111
+ json.loads(input_data)
3112
+ if output_data:
3113
+ json.loads(output_data)
3114
+ except json.JSONDecodeError as e:
3115
+ validation_errors.append(
3116
+ f"Invalid JSON in operation {operation_id}: {e}")
3117
+
3118
+ # Validate foreign key relationships
3119
+ cursor.execute("""
3120
+ SELECT COUNT(*) FROM llm_worker_operation o
3121
+ LEFT JOIN llm_worker_task t ON o.task_id = t.task_id
3122
+ WHERE t.task_id IS NULL
3123
+ """)
3124
+ orphaned_operations = cursor.fetchone()[0]
3125
+ if orphaned_operations > 0:
3126
+ validation_errors.append(
3127
+ f"Found {orphaned_operations} orphaned operations")
3128
+
3129
+ if validation_errors:
3130
+ self.logger.error(
3131
+ f"Migration validation failed: {validation_errors}")
3132
+ return False
3133
+
3134
+ self.logger.info("Migration validation successful")
3135
+ return True
3136
+
3137
+ except Exception as e:
3138
+ self.logger.error(f"Migration validation error: {str(e)}")
3139
+ return False
3140
+
3141
+ def rollback_migration(self, backup_path: Optional[str] = None) -> bool:
+ """
+ Roll back the migration by restoring the database from a backup.
+
+ Args:
+ backup_path: Path to the backup file; if None, the latest backup is used
3147
+
3148
+ Returns:
3149
+ True if rollback successful, False otherwise
3150
+ """
3151
+ try:
3152
+ import shutil
3153
+ import glob
3154
+
3155
+ # Find backup file if not provided
3156
+ if not backup_path:
3157
+ backup_pattern = f"{self.db_path}.backup_*"
3158
+ backup_files = glob.glob(backup_pattern)
3159
+ if not backup_files:
3160
+ self.logger.error("No backup files found for rollback")
3161
+ return False
3162
+ backup_path = max(backup_files)  # timestamped suffixes sort lexicographically, so max() is the most recent
3163
+
3164
+ if not os.path.exists(backup_path):
3165
+ self.logger.error(f"Backup file not found: {backup_path}")
3166
+ return False
3167
+
3168
+ # Create a backup of current state before rollback
3169
+ current_backup = f"{self.db_path}.pre_rollback_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
3170
+ shutil.copy2(self.db_path, current_backup)
3171
+
3172
+ # Restore from backup
3173
+ shutil.copy2(backup_path, self.db_path)
3174
+
3175
+ self.logger.info(
3176
+ f"Migration rolled back from backup: {backup_path}")
3177
+ self.logger.info(f"Current state backed up to: {current_backup}")
3178
+ return True
3179
+
3180
+ except Exception as e:
3181
+ self.logger.error(f"Rollback failed: {str(e)}")
3182
+ return False
3183
+
3184
+ def execute_full_migration(self) -> bool:
3185
+ """
3186
+ Execute complete migration process with error handling and rollback.
3187
+
3188
+ Returns:
3189
+ True if migration successful, False otherwise
3190
+ """
3191
+ backup_path = None
3192
+ try:
3193
+ self.logger.info("Starting schema migration process")
3194
+
3195
+ # Step 1: Create backup
3196
+ backup_path = self.backup_existing_schema()
3197
+
3198
+ # Step 2: Migrate executions to tasks
3199
+ task_count = self.migrate_executions_to_tasks()
3200
+
3201
+ # Step 3: Migrate stage results to operations
3202
+ operation_count = self.migrate_stage_results_to_operations()
3203
+
3204
+ # Step 4: Validate migration
3205
+ if not self.validate_migration():
3206
+ self.logger.error("Migration validation failed, rolling back")
3207
+ self.rollback_migration(backup_path)
3208
+ return False
3209
+
3210
+ self.logger.info(
3211
+ f"Migration completed successfully: {task_count} tasks, {operation_count} operations")
3212
+ return True
3213
+
3214
+ except Exception as e:
3215
+ self.logger.error(f"Migration failed: {str(e)}")
3216
+ if backup_path:
3217
+ self.logger.info("Attempting rollback...")
3218
+ self.rollback_migration(backup_path)
3219
+ return False
3220
+
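+ # Illustrative sketch (editorial addition): the one-call entry point for the
+ # legacy-schema upgrade; a timestamped .backup_* copy of the database is kept
+ # either way.
+ #
+ #     dm = LocalDataManager("./fusesell_data")
+ #     if not dm.execute_full_migration():
+ #         print("migration failed; database restored from backup")
+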
+     # ===== SERVER-COMPATIBLE TASK MANAGEMENT METHODS =====
+
+     def create_task(
+         self,
+         task_id: str,
+         plan_id: str,
+         org_id: str,
+         request_body: Dict[str, Any],
+         status: str = "running"
+     ) -> None:
+         """
+         Create a task record using the server-compatible schema (llm_worker_task).
+
+         Args:
+             task_id: Unique task identifier (sales process ID)
+             plan_id: Plan identifier
+             org_id: Organization identifier
+             request_body: Initial request data for the sales process
+             status: Task status (running, completed, failed)
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT OR REPLACE INTO llm_worker_task
+                     (task_id, plan_id, org_id, status, current_runtime_index,
+                      messages, request_body, created_at, updated_at)
+                     VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
+                 """, (
+                     task_id,
+                     plan_id,
+                     org_id,
+                     status,
+                     0,  # initial runtime index
+                     json.dumps([]),  # no messages yet
+                     json.dumps(request_body)
+                 ))
+                 conn.commit()
+                 self.logger.debug(f"Created task: {task_id}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to create task: {str(e)}")
+             raise
+
+     def update_task_status(
+         self,
+         task_id: str,
+         status: str,
+         runtime_index: Optional[int] = None
+     ) -> None:
+         """
+         Update task status and runtime index using the server-compatible schema.
+
+         Args:
+             task_id: Task identifier
+             status: New task status (running, completed, failed)
+             runtime_index: Optional runtime index to update
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 if runtime_index is not None:
+                     cursor.execute("""
+                         UPDATE llm_worker_task
+                         SET status = ?, current_runtime_index = ?, updated_at = CURRENT_TIMESTAMP
+                         WHERE task_id = ?
+                     """, (status, runtime_index, task_id))
+                 else:
+                     cursor.execute("""
+                         UPDATE llm_worker_task
+                         SET status = ?, updated_at = CURRENT_TIMESTAMP
+                         WHERE task_id = ?
+                     """, (status, task_id))
+
+                 conn.commit()
+                 self.logger.debug(f"Updated task {task_id}: status={status}, runtime_index={runtime_index}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to update task status: {str(e)}")
+             raise
+
+     def get_task(self, task_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get a task record with all related data.
+
+         Args:
+             task_id: Task identifier
+
+         Returns:
+             Task data, or None if not found
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 cursor.execute("""
+                     SELECT * FROM llm_worker_task WHERE task_id = ?
+                 """, (task_id,))
+
+                 row = cursor.fetchone()
+                 if row:
+                     task_data = dict(row)
+
+                     # Parse JSON fields
+                     if task_data['messages']:
+                         try:
+                             task_data['messages'] = json.loads(task_data['messages'])
+                         except json.JSONDecodeError:
+                             task_data['messages'] = []
+
+                     if task_data['request_body']:
+                         try:
+                             task_data['request_body'] = json.loads(task_data['request_body'])
+                         except json.JSONDecodeError:
+                             task_data['request_body'] = {}
+
+                     return task_data
+
+                 return None
+
+         except Exception as e:
+             self.logger.error(f"Failed to get task: {str(e)}")
+             return None
+
+     def add_task_message(self, task_id: str, message: str) -> None:
+         """
+         Append a message to the task's messages array.
+
+         Args:
+             task_id: Task identifier
+             message: Message to add
+         """
+         try:
+             task = self.get_task(task_id)
+             if not task:
+                 self.logger.warning(f"Task not found: {task_id}")
+                 return
+
+             messages = task.get('messages', [])
+             messages.append({
+                 'message': message,
+                 'timestamp': datetime.now().isoformat()
+             })
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     UPDATE llm_worker_task
+                     SET messages = ?, updated_at = CURRENT_TIMESTAMP
+                     WHERE task_id = ?
+                 """, (json.dumps(messages), task_id))
+                 conn.commit()
+
+         except Exception as e:
+             self.logger.error(f"Failed to add task message: {str(e)}")
+             raise
+
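
The task lifecycle these methods implement, as a short illustrative sketch (all identifiers and payloads below are made-up values):

    manager = LocalDataManager()

    manager.create_task(
        task_id="task-001",
        plan_id="plan-standard",
        org_id="org-acme",
        request_body={"customer_email": "jane@example.com"},
    )
    manager.add_task_message("task-001", "Data acquisition started")
    manager.update_task_status("task-001", "completed", runtime_index=1)

    task = manager.get_task("task-001")
    print(task["status"], len(task["messages"]))  # -> completed 1
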
+     # ===== SERVER-COMPATIBLE OPERATION MANAGEMENT METHODS =====
+
+     def create_operation(
+         self,
+         task_id: str,
+         executor_name: str,
+         runtime_index: int,
+         chain_index: int,
+         input_data: Dict[str, Any]
+     ) -> str:
+         """
+         Create an operation record with input data (llm_worker_operation).
+
+         Args:
+             task_id: Parent task identifier
+             executor_name: Stage name (data_acquisition, lead_scoring, etc.)
+             runtime_index: Execution attempt number
+             chain_index: Position in the execution chain
+             input_data: Stage-specific input data
+
+         Returns:
+             Generated operation_id
+         """
+         try:
+             # Build a deterministic, unique operation ID
+             operation_id = f"{task_id}_{executor_name}_{runtime_index}_{chain_index}"
+
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+                 cursor.execute("""
+                     INSERT OR REPLACE INTO llm_worker_operation
+                     (operation_id, task_id, executor_name, runtime_index,
+                      chain_index, execution_status, input_data, output_data,
+                      date_created, date_updated)
+                     VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
+                 """, (
+                     operation_id,
+                     task_id,
+                     executor_name,
+                     runtime_index,
+                     chain_index,
+                     'running',  # initial status
+                     json.dumps(input_data),
+                     json.dumps({})  # empty output initially
+                 ))
+                 conn.commit()
+                 self.logger.debug(f"Created operation: {operation_id}")
+                 return operation_id
+
+         except Exception as e:
+             self.logger.error(f"Failed to create operation: {str(e)}")
+             raise
+
+     def update_operation_status(
+         self,
+         operation_id: str,
+         execution_status: str,
+         output_data: Optional[Dict[str, Any]] = None
+     ) -> None:
+         """
+         Update an operation's execution status and, optionally, its output data.
+
+         Args:
+             operation_id: Operation identifier
+             execution_status: New status (done, failed, running)
+             output_data: Optional stage-specific output data
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 if output_data is not None:
+                     cursor.execute("""
+                         UPDATE llm_worker_operation
+                         SET execution_status = ?, output_data = ?, date_updated = CURRENT_TIMESTAMP
+                         WHERE operation_id = ?
+                     """, (execution_status, json.dumps(output_data), operation_id))
+                 else:
+                     cursor.execute("""
+                         UPDATE llm_worker_operation
+                         SET execution_status = ?, date_updated = CURRENT_TIMESTAMP
+                         WHERE operation_id = ?
+                     """, (execution_status, operation_id))
+
+                 conn.commit()
+                 self.logger.debug(f"Updated operation {operation_id}: status={execution_status}")
+
+         except Exception as e:
+             self.logger.error(f"Failed to update operation status: {str(e)}")
+             raise
+
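
A sketch of the per-stage operation lifecycle (hypothetical IDs; the payloads are illustrative):

    manager = LocalDataManager()

    # The operation ID deterministically encodes task, stage, attempt, and chain position.
    op_id = manager.create_operation(
        task_id="task-001",
        executor_name="lead_scoring",
        runtime_index=0,
        chain_index=2,
        input_data={"customer_id": "cust-42"},
    )
    # op_id == "task-001_lead_scoring_0_2"

    # When the stage finishes, record its status and output.
    manager.update_operation_status(op_id, "done", {"score": 87})
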
+     def get_operations_by_task(self, task_id: str) -> List[Dict[str, Any]]:
+         """
+         Get all operations for a specific task.
+
+         Args:
+             task_id: Task identifier
+
+         Returns:
+             List of operation records
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 cursor.execute("""
+                     SELECT * FROM llm_worker_operation
+                     WHERE task_id = ?
+                     ORDER BY runtime_index, chain_index
+                 """, (task_id,))
+
+                 operations = []
+                 for row in cursor.fetchall():
+                     operation = dict(row)
+
+                     # Parse JSON fields
+                     if operation['input_data']:
+                         try:
+                             operation['input_data'] = json.loads(operation['input_data'])
+                         except json.JSONDecodeError:
+                             operation['input_data'] = {}
+
+                     if operation['output_data']:
+                         try:
+                             operation['output_data'] = json.loads(operation['output_data'])
+                         except json.JSONDecodeError:
+                             operation['output_data'] = {}
+
+                     operations.append(operation)
+
+                 return operations
+
+         except Exception as e:
+             self.logger.error(f"Failed to get operations by task: {str(e)}")
+             return []
+
+     def get_operation(self, operation_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get an individual operation record.
+
+         Args:
+             operation_id: Operation identifier
+
+         Returns:
+             Operation data, or None if not found
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 cursor.execute("""
+                     SELECT * FROM llm_worker_operation WHERE operation_id = ?
+                 """, (operation_id,))
+
+                 row = cursor.fetchone()
+                 if row:
+                     operation = dict(row)
+
+                     # Parse JSON fields
+                     if operation['input_data']:
+                         try:
+                             operation['input_data'] = json.loads(operation['input_data'])
+                         except json.JSONDecodeError:
+                             operation['input_data'] = {}
+
+                     if operation['output_data']:
+                         try:
+                             operation['output_data'] = json.loads(operation['output_data'])
+                         except json.JSONDecodeError:
+                             operation['output_data'] = {}
+
+                     return operation
+
+                 return None
+
+         except Exception as e:
+             self.logger.error(f"Failed to get operation: {str(e)}")
+             return None
+
+     def get_operations_by_executor(
+         self,
+         executor_name: str,
+         org_id: Optional[str] = None,
+         execution_status: Optional[str] = None
+     ) -> List[Dict[str, Any]]:
+         """
+         Get operations by executor name (stage-specific queries).
+
+         Args:
+             executor_name: Stage name to filter by
+             org_id: Optional organization filter
+             execution_status: Optional status filter
+
+         Returns:
+             List of matching operations
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 query = """
+                     SELECT o.* FROM llm_worker_operation o
+                     JOIN llm_worker_task t ON o.task_id = t.task_id
+                     WHERE o.executor_name = ?
+                 """
+                 params = [executor_name]
+
+                 if org_id:
+                     query += " AND t.org_id = ?"
+                     params.append(org_id)
+
+                 if execution_status:
+                     query += " AND o.execution_status = ?"
+                     params.append(execution_status)
+
+                 query += " ORDER BY o.date_created DESC"
+
+                 cursor.execute(query, params)
+
+                 operations = []
+                 for row in cursor.fetchall():
+                     operation = dict(row)
+
+                     # Parse JSON fields
+                     if operation['input_data']:
+                         try:
+                             operation['input_data'] = json.loads(operation['input_data'])
+                         except json.JSONDecodeError:
+                             operation['input_data'] = {}
+
+                     if operation['output_data']:
+                         try:
+                             operation['output_data'] = json.loads(operation['output_data'])
+                         except json.JSONDecodeError:
+                             operation['output_data'] = {}
+
+                     operations.append(operation)
+
+                 return operations
+
+         except Exception as e:
+             self.logger.error(f"Failed to get operations by executor: {str(e)}")
+             return []
+
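
For example, pulling every failed lead_scoring operation for one organization (the organization ID is illustrative):

    manager = LocalDataManager()
    failed_scoring = manager.get_operations_by_executor(
        "lead_scoring", org_id="org-acme", execution_status="failed"
    )
    for op in failed_scoring:
        print(op["operation_id"], (op["output_data"] or {}).get("error"))
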
+     # ===== SERVER-COMPATIBLE QUERY METHODS =====
+
+     def get_task_with_operations(self, task_id: str) -> Optional[Dict[str, Any]]:
+         """
+         Get complete task details with all related operations.
+
+         Args:
+             task_id: Task identifier
+
+         Returns:
+             Complete task data with operations, or None if not found
+         """
+         try:
+             task = self.get_task(task_id)
+             if not task:
+                 return None
+
+             operations = self.get_operations_by_task(task_id)
+
+             # Attach operations to the task data
+             task['operations'] = operations
+
+             # Add summary statistics
+             task['summary'] = {
+                 'total_operations': len(operations),
+                 'completed_operations': len([op for op in operations if op['execution_status'] == 'done']),
+                 'failed_operations': len([op for op in operations if op['execution_status'] == 'failed']),
+                 'running_operations': len([op for op in operations if op['execution_status'] == 'running'])
+             }
+
+             return task
+
+         except Exception as e:
+             self.logger.error(f"Failed to get task with operations: {str(e)}")
+             return None
+
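
A quick illustrative read of the summary block this method attaches (the task ID is assumed):

    details = manager.get_task_with_operations("task-001")
    if details:
        s = details["summary"]
        print(f"{s['completed_operations']}/{s['total_operations']} done, "
              f"{s['failed_operations']} failed, {s['running_operations']} running")
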
+     def get_execution_timeline(self, task_id: str, runtime_index: Optional[int] = None) -> List[Dict[str, Any]]:
+         """
+         Get chronological operation tracking for a specific execution attempt.
+
+         Args:
+             task_id: Task identifier
+             runtime_index: Optional specific runtime index
+
+         Returns:
+             List of operations in chronological order
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 query = """
+                     SELECT * FROM llm_worker_operation
+                     WHERE task_id = ?
+                 """
+                 params = [task_id]
+
+                 if runtime_index is not None:
+                     query += " AND runtime_index = ?"
+                     params.append(runtime_index)
+
+                 query += " ORDER BY runtime_index, chain_index, date_created"
+
+                 cursor.execute(query, params)
+
+                 timeline = []
+                 for row in cursor.fetchall():
+                     operation = dict(row)
+
+                     # Parse JSON fields
+                     if operation['input_data']:
+                         try:
+                             operation['input_data'] = json.loads(operation['input_data'])
+                         except json.JSONDecodeError:
+                             operation['input_data'] = {}
+
+                     if operation['output_data']:
+                         try:
+                             operation['output_data'] = json.loads(operation['output_data'])
+                         except json.JSONDecodeError:
+                             operation['output_data'] = {}
+
+                     timeline.append(operation)
+
+                 return timeline
+
+         except Exception as e:
+             self.logger.error(f"Failed to get execution timeline: {str(e)}")
+             return []
+
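
For instance, replaying a retry attempt stage by stage (runtime_index=1 is assumed here to be the second attempt):

    for op in manager.get_execution_timeline("task-001", runtime_index=1):
        print(op["chain_index"], op["executor_name"], op["execution_status"])
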
+     def get_stage_performance_metrics(
+         self,
+         executor_name: str,
+         org_id: Optional[str] = None,
+         date_range: Optional[tuple] = None
+     ) -> Dict[str, Any]:
+         """
+         Get performance analysis for a specific stage.
+
+         Args:
+             executor_name: Stage name
+             org_id: Optional organization filter
+             date_range: Optional (start_date, end_date) tuple
+
+         Returns:
+             Performance metrics dictionary
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 cursor = conn.cursor()
+
+                 query = """
+                     SELECT
+                         o.execution_status,
+                         COUNT(*) as count,
+                         AVG(julianday(o.date_updated) - julianday(o.date_created)) * 24 * 60 as avg_duration_minutes
+                     FROM llm_worker_operation o
+                     JOIN llm_worker_task t ON o.task_id = t.task_id
+                     WHERE o.executor_name = ?
+                 """
+                 params = [executor_name]
+
+                 if org_id:
+                     query += " AND t.org_id = ?"
+                     params.append(org_id)
+
+                 if date_range:
+                     query += " AND o.date_created BETWEEN ? AND ?"
+                     params.extend(date_range)
+
+                 query += " GROUP BY o.execution_status"
+
+                 cursor.execute(query, params)
+
+                 metrics = {
+                     'executor_name': executor_name,
+                     'org_id': org_id,
+                     'total_executions': 0,
+                     'success_rate': 0.0,
+                     'failure_rate': 0.0,
+                     'avg_duration_minutes': 0.0,
+                     'status_breakdown': {}
+                 }
+
+                 total_count = 0
+                 success_count = 0
+                 total_duration = 0.0
+
+                 for row in cursor.fetchall():
+                     status, count, avg_duration = row
+                     total_count += count
+                     metrics['status_breakdown'][status] = {
+                         'count': count,
+                         'avg_duration_minutes': avg_duration or 0.0
+                     }
+
+                     if status == 'done':
+                         success_count = count
+
+                     if avg_duration:
+                         total_duration += avg_duration * count
+
+                 if total_count > 0:
+                     metrics['total_executions'] = total_count
+                     metrics['success_rate'] = (success_count / total_count) * 100
+                     metrics['failure_rate'] = ((total_count - success_count) / total_count) * 100
+                     metrics['avg_duration_minutes'] = total_duration / total_count
+
+                 return metrics
+
+         except Exception as e:
+             self.logger.error(f"Failed to get stage performance metrics: {str(e)}")
+             return {'error': str(e)}
+
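
As a worked example of the arithmetic above: with 8 'done' and 2 'failed' operations, success_rate = 8 / 10 * 100 = 80.0, failure_rate = 20.0, and avg_duration_minutes is the count-weighted mean of the per-status averages. A hypothetical call:

    metrics = manager.get_stage_performance_metrics("lead_scoring", org_id="org-acme")
    print(f"{metrics['success_rate']:.1f}% success over {metrics['total_executions']} runs")
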
+     def find_failed_operations(
+         self,
+         org_id: Optional[str] = None,
+         executor_name: Optional[str] = None,
+         limit: int = 50
+     ) -> List[Dict[str, Any]]:
+         """
+         Find failed operations for debugging.
+
+         Args:
+             org_id: Optional organization filter
+             executor_name: Optional stage filter
+             limit: Maximum number of results
+
+         Returns:
+             List of failed operations with error details
+         """
+         try:
+             with sqlite3.connect(self.db_path) as conn:
+                 conn.row_factory = sqlite3.Row
+                 cursor = conn.cursor()
+
+                 query = """
+                     SELECT o.*, t.org_id FROM llm_worker_operation o
+                     JOIN llm_worker_task t ON o.task_id = t.task_id
+                     WHERE o.execution_status = 'failed'
+                 """
+                 params = []
+
+                 if org_id:
+                     query += " AND t.org_id = ?"
+                     params.append(org_id)
+
+                 if executor_name:
+                     query += " AND o.executor_name = ?"
+                     params.append(executor_name)
+
+                 query += " ORDER BY o.date_created DESC LIMIT ?"
+                 params.append(limit)
+
+                 cursor.execute(query, params)
+
+                 failed_operations = []
+                 for row in cursor.fetchall():
+                     operation = dict(row)
+
+                     # Parse output_data to extract error information
+                     if operation['output_data']:
+                         try:
+                             output_data = json.loads(operation['output_data'])
+                             operation['output_data'] = output_data
+                             operation['error_summary'] = output_data.get('error', 'Unknown error')
+                         except json.JSONDecodeError:
+                             operation['error_summary'] = 'JSON parse error in output_data'
+                     else:
+                         operation['error_summary'] = 'No error details available'
+
+                     failed_operations.append(operation)
+
+                 return failed_operations
+
+         except Exception as e:
+             self.logger.error(f"Failed to find failed operations: {str(e)}")
+             return []
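
A short debugging sketch built on this query (the filters are illustrative):

    for op in manager.find_failed_operations(org_id="org-acme", limit=10):
        print(op["date_created"], op["executor_name"], op["error_summary"])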