vibego 0.2.58__py3-none-any.whl → 1.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bot.py +1346 -1136
- logging_setup.py +25 -18
- master.py +812 -506
- project_repository.py +42 -40
- scripts/__init__.py +1 -2
- scripts/bump_version.sh +57 -55
- scripts/log_writer.py +19 -16
- scripts/master_healthcheck.py +38 -138
- scripts/models/claudecode.sh +4 -4
- scripts/models/codex.sh +1 -1
- scripts/models/common.sh +24 -6
- scripts/models/gemini.sh +2 -2
- scripts/publish.sh +50 -50
- scripts/requirements.txt +1 -0
- scripts/run_bot.sh +41 -17
- scripts/session_pointer_watch.py +265 -0
- scripts/start.sh +147 -120
- scripts/start_tmux_codex.sh +33 -8
- scripts/stop_all.sh +21 -21
- scripts/stop_bot.sh +31 -10
- scripts/test_deps_check.sh +32 -28
- tasks/__init__.py +1 -1
- tasks/commands.py +4 -4
- tasks/constants.py +1 -1
- tasks/fsm.py +9 -9
- tasks/models.py +7 -7
- tasks/service.py +56 -101
- vibego-1.0.10.dist-info/METADATA +226 -0
- {vibego-0.2.58.dist-info → vibego-1.0.10.dist-info}/RECORD +38 -36
- vibego-1.0.10.dist-info/licenses/LICENSE +201 -0
- vibego_cli/__init__.py +5 -4
- vibego_cli/__main__.py +1 -2
- vibego_cli/config.py +9 -9
- vibego_cli/deps.py +8 -9
- vibego_cli/main.py +63 -63
- vibego-0.2.58.dist-info/METADATA +0 -197
- {vibego-0.2.58.dist-info → vibego-1.0.10.dist-info}/WHEEL +0 -0
- {vibego-0.2.58.dist-info → vibego-1.0.10.dist-info}/entry_points.txt +0 -0
- {vibego-0.2.58.dist-info → vibego-1.0.10.dist-info}/top_level.txt +0 -0
tasks/service.py
CHANGED
@@ -1,4 +1,4 @@
-"""
+"""Persistence and business logic for the task subsystem."""
 from __future__ import annotations
 
 import asyncio
@@ -28,10 +28,10 @@ logger = logging.getLogger(__name__)
 
 
 class TaskService:
-    """
+    """Wrap task-related database operations."""
 
     def __init__(self, db_path: Path, project_slug: str) -> None:
-        """
+        """Initialise the service with the database path and project slug."""
 
         self.db_path = Path(db_path)
         self.project_slug = project_slug
@@ -40,7 +40,7 @@ class TaskService:
         self._valid_statuses = set(TASK_STATUSES)
 
     async def initialize(self) -> None:
-        """
+        """Ensure the schema exists and run required migrations."""
 
         if self._initialized:
             return
@@ -58,7 +58,7 @@ class TaskService:
         self._initialized = True
 
     async def _create_tables(self, db: aiosqlite.Connection) -> None:
-        """
+        """Create or augment all tables and indexes for tasks."""
 
         await db.execute(
             """
@@ -191,7 +191,7 @@ class TaskService:
         )
 
     async def _migrate_timezones(self, db: aiosqlite.Connection) -> None:
-        """
+        """Convert legacy UTC timestamps to their Shanghai equivalents."""
 
         db.row_factory = aiosqlite.Row
         tables: Sequence[tuple[str, str, tuple[str, ...]]] = (
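The `_migrate_timezones` pass rewrites timestamps that earlier releases stored as UTC. A minimal sketch of that conversion, assuming ISO-8601 strings and the standard-library `zoneinfo` module (the package's own `shanghai_now_iso`/`ensure_shanghai_iso` helpers are defined elsewhere and are not part of this diff):

```python
# Illustrative sketch only: convert a stored UTC (or offset-naive) ISO timestamp
# into its Asia/Shanghai equivalent, which is what the migration describes.
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

SHANGHAI = ZoneInfo("Asia/Shanghai")

def to_shanghai_iso(value: str) -> str:
    dt = datetime.fromisoformat(value)
    if dt.tzinfo is None:  # legacy rows carried no offset; treat them as UTC
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(SHANGHAI).isoformat()

print(to_shanghai_iso("2024-05-01T02:30:00"))  # -> 2024-05-01T10:30:00+08:00
```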
@@ -245,7 +245,7 @@ class TaskService:
         )
 
     async def _migrate_task_ids_to_underscore(self, db: aiosqlite.Connection) -> None:
-        """
+        """Rewrite legacy task IDs with underscores so Telegram commands remain clickable."""
 
         db.row_factory = aiosqlite.Row
         async with db.execute(
@@ -265,7 +265,7 @@ class TaskService:
         if not legacy_row:
             return
 
-        logger.info("
+        logger.info("Detected legacy task IDs, starting migration: project=%s", self.project_slug)
         await db.execute("PRAGMA foreign_keys = OFF")
         await db.execute("PRAGMA defer_foreign_keys = ON")
         mapping: Dict[str, str] = {}
@@ -289,27 +289,27 @@ class TaskService:
                 continue
             if new_id is None:
                 logger.error(
-                    "
+                    "Task ID migration encountered a non-normalisable value: project=%s value=%s",
                     self.project_slug,
                     old_id,
                 )
-                raise ValueError("
+                raise ValueError("Task ID migration failed: unable to normalise ID")
             if new_id != old_id and new_id in existing_ids:
                 logger.error(
-                    "
+                    "Task ID migration detected a potential conflict: project=%s old=%s new=%s",
                     self.project_slug,
                     old_id,
                     new_id,
                 )
-                raise ValueError("
+                raise ValueError("Task ID migration conflict: target ID already exists")
             if new_id in mapping.values() or new_id in mapping:
                 logger.error(
-                    "
+                    "Task ID migration detected a conflict: project=%s old=%s new=%s",
                     self.project_slug,
                     old_id,
                     new_id,
                 )
-                raise ValueError("
+                raise ValueError("Task ID migration conflict")
             mapping[old_id] = new_id
 
         if not mapping:
@@ -338,13 +338,13 @@ class TaskService:
 
         self._write_id_migration_report(mapping)
         logger.info(
-            "
+            "Task ID migration completed: project=%s changed=%s",
             self.project_slug,
             len(mapping),
         )
 
     async def _archive_legacy_child_tasks(self, db: aiosqlite.Connection) -> None:
-        """
+        """Archive legacy child tasks so they stop appearing in listings."""
 
         now = shanghai_now_iso()
         cursor = await db.execute(
@@ -364,15 +364,15 @@ class TaskService:
             changed = 0
         await cursor.close()
         if changed > 0:
-            logger.info("
+            logger.info("Archived legacy child tasks: project=%s count=%s", self.project_slug, changed)
 
     async def _drop_child_sequences_table(self, db: aiosqlite.Connection) -> None:
-        """
+        """Remove the defunct child sequence table to prevent stale lookups."""
 
         await db.execute("DROP TABLE IF EXISTS child_sequences")
 
     async def _verify_status_values(self, db: aiosqlite.Connection) -> None:
-        """
+        """Validate task status values against the allowed enumeration."""
 
         async with db.execute(
             "SELECT DISTINCT status FROM tasks WHERE project_slug = ?",
@@ -382,14 +382,14 @@ class TaskService:
         for (status,) in rows:
             if status is None:
                 logger.error(
-                    "
+                    "Task status integrity check found NULL value: project=%s",
                     self.project_slug,
                 )
                 continue
             normalized = self._normalize_status_token(status, context="integrity_check")
             if normalized not in self._valid_statuses:
                 logger.error(
-                    "
+                    "Task status integrity check found unknown value: project=%s value=%s",
                     self.project_slug,
                     status,
                 )
@@ -405,7 +405,7 @@ class TaskService:
         description: Optional[str] = None,
         actor: Optional[str],
     ) -> TaskRecord:
-        """
+        """Create a root task and capture the initial history entry."""
 
         async with self._lock:
             async with aiosqlite.connect(self.db_path) as db:
@@ -445,15 +445,6 @@ class TaskService:
                         0,
                     ),
                 )
-                await self._insert_history(
-                    db,
-                    task_id,
-                    "create",
-                    None,
-                    title,
-                    actor,
-                    created_at=now,
-                )
                 await db.commit()
         return TaskRecord(
             id=task_id,
@@ -483,7 +474,7 @@ class TaskService:
         include_archived: bool = False,
         exclude_statuses: Optional[Sequence[str]] = None,
     ) -> List[TaskRecord]:
-        """
+        """List tasks with optional filters, status exclusions, and pagination."""
 
         query = [
             "SELECT * FROM tasks WHERE project_slug = ?",
@@ -515,7 +506,7 @@ class TaskService:
         page: int,
         page_size: int = DEFAULT_LIMIT,
     ) -> Tuple[List[TaskRecord], int, int]:
-        """
+        """Search tasks by title or description and return results, pages, and totals."""
 
         if page_size <= 0:
             page_size = DEFAULT_LIMIT
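`search_tasks` returns the rows for one page together with the derived page count and total. A sketch of the paging arithmetic; the `page_size` fallback mirrors the hunk, while the offset computation and the `DEFAULT_LIMIT` value are assumptions for illustration:

```python
# Hypothetical helper mirroring the pages/total bookkeeping; the real
# DEFAULT_LIMIT constant lives elsewhere in the package and its value is assumed.
DEFAULT_LIMIT = 10

def page_window(total: int, page: int, page_size: int = DEFAULT_LIMIT) -> tuple[int, int, int]:
    """Return (pages, limit, offset) for a 1-based page number."""
    if page_size <= 0:  # fallback shown in the diff
        page_size = DEFAULT_LIMIT
    pages = max(1, -(-total // page_size))  # ceiling division
    page = min(max(page, 1), pages)
    return pages, page_size, (page - 1) * page_size

print(page_window(total=23, page=3))  # -> (3, 10, 20)
```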
@@ -556,7 +547,7 @@ class TaskService:
         return [self._row_to_task(row, context="search") for row in rows], pages, total
 
     async def get_task(self, task_id: str) -> Optional[TaskRecord]:
-        """
+        """Return a task by ID, or ``None`` when it does not exist."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
@@ -588,11 +579,11 @@ class TaskService:
         description: Optional[str] = None,
         archived: Optional[bool] = None,
     ) -> TaskRecord:
-        """
+        """Update a task, write history entries, and return the refreshed record."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
-            raise ValueError("
+            raise ValueError("Task does not exist")
         task_id = canonical_task_id
         async with self._lock:
             async with aiosqlite.connect(self.db_path) as db:
@@ -602,19 +593,17 @@ class TaskService:
                 row = await self._fetch_task_row(db, task_id)
                 if row is None:
                     await db.execute("ROLLBACK")
-                    raise ValueError("
+                    raise ValueError("Task does not exist")
                 updates = []
                 params: List[object] = []
-                history_items: List[Tuple[str, Optional[str], Optional[str]]] = []
                 if title is not None and title != row["title"]:
                     updates.append("title = ?")
-                    history_items.append(("title", row["title"], title))
                     params.append(title)
                 if status is not None:
                     normalized_status = self._normalize_status_token(status, context="update")
                     if normalized_status != status:
                         logger.warning(
-                            "
+                            "Task status input corrected automatically: task_id=%s raw=%s normalized=%s",
                             task_id,
                             status,
                             normalized_status,
@@ -624,35 +613,28 @@ class TaskService:
                     status_value = None
                 if status_value is not None and status_value != row["status"]:
                     updates.append("status = ?")
-                    history_items.append(("status", row["status"], status_value))
                     params.append(status_value)
                 if priority is not None and priority != row["priority"]:
                     updates.append("priority = ?")
-                    history_items.append(("priority", str(row["priority"]), str(priority)))
                     params.append(priority)
                 if task_type is not None and task_type != row["task_type"]:
                     updates.append("task_type = ?")
-                    history_items.append(("task_type", row["task_type"], task_type))
                     params.append(task_type)
                 if tags is not None:
                     tags_json = json.dumps(list(tags))
                     if tags_json != row["tags"]:
                         updates.append("tags = ?")
-                        history_items.append(("tags", row["tags"], tags_json))
                         params.append(tags_json)
                 if due_date is not None and due_date != row["due_date"]:
                     updates.append("due_date = ?")
-                    history_items.append(("due_date", row["due_date"], due_date))
                     params.append(due_date)
                 if description is not None and description != row["description"]:
                     updates.append("description = ?")
-                    history_items.append(("description", row["description"], description))
                     params.append(description)
                 if archived is not None:
                     archived_int = 1 if archived else 0
                     if archived_int != row["archived"]:
                         updates.append("archived = ?")
-                        history_items.append(("archived", str(row["archived"]), str(archived_int)))
                         params.append(archived_int)
                 if updates:
                     now = shanghai_now_iso()
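`update_task` now collects only the columns that actually changed into parallel `updates`/`params` lists and issues a single parameterised UPDATE; the per-field history writes on the removed lines are gone. A condensed, synchronous sketch of that pattern with just two fields:

```python
# Sketch of the collect-then-UPDATE pattern; build_update is hypothetical, and
# the real method also binds the task id for the WHERE clause and stamps the
# update time before executing (not shown here).
def build_update(row: dict, *, title=None, priority=None) -> tuple[str, list]:
    updates: list[str] = []
    params: list[object] = []
    if title is not None and title != row["title"]:
        updates.append("title = ?")
        params.append(title)
    if priority is not None and priority != row["priority"]:
        updates.append("priority = ?")
        params.append(priority)
    sql = f"UPDATE tasks SET {', '.join(updates)} WHERE id = ?"
    return sql, params

sql, params = build_update({"title": "old", "priority": 1}, title="new")
print(sql)     # UPDATE tasks SET title = ? WHERE id = ?
print(params)  # ['new']
```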
@@ -663,20 +645,10 @@ class TaskService:
                         f"UPDATE tasks SET {' , '.join(updates)} WHERE id = ?",
                         params,
                     )
-                    for field, old, new in history_items:
-                        await self._insert_history(
-                            db,
-                            task_id,
-                            field,
-                            old,
-                            new,
-                            actor,
-                            created_at=now,
-                        )
                 await db.commit()
         updated = await self.get_task(task_id)
         if updated is None:
-            raise ValueError("
+            raise ValueError("Task does not exist")
         return updated
 
     async def add_note(
@@ -687,11 +659,11 @@ class TaskService:
         content: str,
         actor: Optional[str],
     ) -> TaskNoteRecord:
-        """
+        """Append a note to a task (notes no longer create task history entries)."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
-            raise ValueError("
+            raise ValueError("Task does not exist")
         task_id = canonical_task_id
         now = shanghai_now_iso()
         async with self._lock:
@@ -702,7 +674,7 @@ class TaskService:
                 task_row = await self._fetch_task_row(db, task_id)
                 if task_row is None:
                     await db.execute("ROLLBACK")
-                    raise ValueError("
+                    raise ValueError("Task does not exist")
                 cursor = await db.execute(
                     """
                     INSERT INTO task_notes(task_id, note_type, content, created_at)
@@ -711,23 +683,6 @@ class TaskService:
                     (task_id, note_type, content, now),
                 )
                 note_id = cursor.lastrowid
-                payload = {
-                    "action": "add_note",
-                    "note_type": note_type,
-                    "note_id": note_id,
-                    "content_length": len(content or ""),
-                }
-                await self._insert_history(
-                    db,
-                    task_id,
-                    "note",
-                    None,
-                    content,
-                    actor,
-                    event_type="task_action",
-                    payload=json.dumps(payload, ensure_ascii=False),
-                    created_at=now,
-                )
                 await db.commit()
         return TaskNoteRecord(
             id=note_id,
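`add_note` now writes only the note row itself and reports the generated `lastrowid` back as the note ID; the extra history entry built from the old `payload` dictionary is gone. A self-contained sketch of the remaining insert with `aiosqlite`, using a stand-in table definition so it runs on its own:

```python
# Stand-alone sketch of the note insert; the CREATE TABLE is a stand-in,
# not the package's real schema.
import asyncio
import aiosqlite

async def add_note(db_path: str, task_id: str, note_type: str, content: str, now: str) -> int:
    async with aiosqlite.connect(db_path) as db:
        await db.execute(
            "CREATE TABLE IF NOT EXISTS task_notes("
            "id INTEGER PRIMARY KEY, task_id TEXT, note_type TEXT, content TEXT, created_at TEXT)"
        )
        cursor = await db.execute(
            "INSERT INTO task_notes(task_id, note_type, content, created_at) VALUES (?, ?, ?, ?)",
            (task_id, note_type, content, now),
        )
        await db.commit()
        return cursor.lastrowid  # reported back as the TaskNoteRecord id

print(asyncio.run(add_note(":memory:", "TASK_0001", "comment", "hello", "2024-05-01T10:00:00+08:00")))
```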
@@ -738,7 +693,7 @@ class TaskService:
         )
 
     async def list_notes(self, task_id: str) -> List[TaskNoteRecord]:
-        """
+        """Return every note for a task ordered by creation time."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
@@ -766,7 +721,7 @@ class TaskService:
         ]
 
     async def list_history(self, task_id: str) -> List[TaskHistoryRecord]:
-        """
+        """Return the full history list for a task."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
@@ -809,11 +764,11 @@ class TaskService:
         payload: Optional[Dict[str, Any]] = None,
         created_at: Optional[str] = None,
     ) -> None:
-        """
+        """Record a structured task event."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
-            raise ValueError("
+            raise ValueError("Task does not exist")
         task_id = canonical_task_id
 
         event_token = (event_type or "task_action").strip() or "task_action"
@@ -825,7 +780,7 @@ class TaskService:
             try:
                 payload_text = json.dumps(payload, ensure_ascii=False)
             except (TypeError, ValueError) as exc:
-                logger.warning("
+                logger.warning("Failed to serialise event payload: task_id=%s error=%s", task_id, exc)
                 payload_text = None
         async with self._lock:
             async with aiosqlite.connect(self.db_path) as db:
@@ -835,7 +790,7 @@ class TaskService:
                 row = await self._fetch_task_row(db, task_id)
                 if row is None:
                     await db.execute("ROLLBACK")
-                    raise ValueError("
+                    raise ValueError("Task does not exist")
                 await self._insert_history(
                     db,
                     task_id,
@@ -850,7 +805,7 @@ class TaskService:
                 await db.commit()
 
     async def delete_task(self, task_id: str, *, actor: Optional[str]) -> TaskRecord:
-        """
+        """Perform a logical delete by marking the task archived and return the state."""
 
         updated = await self.update_task(task_id, actor=actor, archived=True)
         return updated
@@ -863,7 +818,7 @@ class TaskService:
         page_size: int = DEFAULT_LIMIT,
         exclude_statuses: Optional[Sequence[str]] = None,
     ) -> Tuple[List[TaskRecord], int]:
-        """
+        """Fetch a specific page of tasks and return both the page data and total count."""
 
         total = await self.count_tasks(
             status=status,
@@ -887,7 +842,7 @@ class TaskService:
         include_archived: bool,
         exclude_statuses: Optional[Sequence[str]] = None,
     ) -> int:
-        """
+        """Count tasks that satisfy the provided filters."""
 
         query = "SELECT COUNT(1) AS c FROM tasks WHERE project_slug = ?"
         params: List[object] = [self.project_slug]
@@ -908,7 +863,7 @@ class TaskService:
         return int(row["c"] if row else 0)
 
     async def backup(self, target_path: Path) -> None:
-        """
+        """Backup the current database to the target path."""
 
         target_path = target_path.expanduser()
         target_path.parent.mkdir(parents=True, exist_ok=True)
@@ -921,7 +876,7 @@ class TaskService:
 
     @staticmethod
     def _convert_task_id_token(value: Optional[str]) -> Optional[str]:
-        """
+        """Normalise task ID separators to remain compatible with legacy formats."""
 
         if value is None:
             return None
@@ -930,14 +885,14 @@ class TaskService:
         if token.startswith("TASK"):
             suffix = token[4:]
             if suffix and not suffix.startswith("_"):
-                #
+                # Legacy formats like TASK0001/TASK0001_1 require an underscore.
                 token = f"TASK_{suffix}"
             else:
                 token = f"TASK{suffix}"
         return token
 
     def _canonical_task_id(self, value: Optional[str]) -> Optional[str]:
-        """
+        """Normalise externally provided task IDs into the canonical format."""
 
         if value is None:
             return None
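`_convert_task_id_token` is what keeps legacy identifiers such as `TASK0001` or `TASK0001_1` usable: it inserts the underscore that `/TASK_0001`-style Telegram commands expect. A sketch of just the branch shown in the hunk (whatever trimming or casing the method does before this point is not part of the diff and is assumed here):

```python
from typing import Optional

def convert_task_id_token(value: Optional[str]) -> Optional[str]:
    if value is None:
        return None
    token = value.strip()  # assumption: earlier steps reduce the input to a bare token
    if token.startswith("TASK"):
        suffix = token[4:]
        if suffix and not suffix.startswith("_"):
            # legacy formats like TASK0001 / TASK0001_1 need an underscore
            token = f"TASK_{suffix}"
        else:
            token = f"TASK{suffix}"
    return token

print(convert_task_id_token("TASK0001"))    # -> TASK_0001
print(convert_task_id_token("TASK0001_1"))  # -> TASK_0001_1
print(convert_task_id_token("TASK_0002"))   # -> TASK_0002
```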
@@ -948,7 +903,7 @@ class TaskService:
         return self._convert_task_id_token(token)
 
     def _write_id_migration_report(self, mapping: Dict[str, str]) -> None:
-        """
+        """Write a JSON report describing task ID migration results."""
 
         if not mapping:
             return
@@ -969,13 +924,13 @@ class TaskService:
             report_path.write_text(json.dumps(payload, ensure_ascii=False, indent=2))
         except Exception as exc:
             logger.warning(
-                "
+                "Failed to write task ID migration report: project=%s error=%s",
                 self.project_slug,
                 exc,
             )
 
     async def _fetch_task_row(self, db: aiosqlite.Connection, task_id: str):
-        """
+        """Fetch the raw task row from the database."""
 
         canonical_task_id = self._canonical_task_id(task_id)
         if not canonical_task_id:
@@ -988,7 +943,7 @@ class TaskService:
         return await cursor.fetchone()
 
     async def _next_root_sequence(self, db: aiosqlite.Connection) -> int:
-        """
+        """Increment and return the next root task sequence."""
 
         async with db.execute(
             "SELECT last_root FROM task_sequences WHERE project_slug = ?",
@@ -1022,7 +977,7 @@ class TaskService:
         payload: Optional[str] = None,
         created_at: Optional[str] = None,
     ) -> None:
-        """
+        """Insert a task history entry while filling timestamps automatically."""
 
         normalized = ensure_shanghai_iso(created_at) if created_at else None
         timestamp = normalized or shanghai_now_iso()
@@ -1044,16 +999,16 @@ class TaskService:
         )
 
     def _normalize_status_token(self, value: Optional[str], *, context: str) -> str:
-        """
+        """Normalise status strings, providing compatibility with legacy aliases."""
 
         if not value:
-            logger.warning("
+            logger.warning("Encountered empty task status; falling back to default: context=%s", context)
             return TASK_STATUSES[0]
         token = str(value).strip().lower()
         mapped = STATUS_ALIASES.get(token, token)
         if mapped not in self._valid_statuses:
             logger.warning(
-                "
+                "Unknown task status detected: value=%s mapped=%s context=%s",
                 value,
                 mapped,
                 context,
@@ -1061,7 +1016,7 @@ class TaskService:
             return mapped
         if mapped != token:
             logger.info(
-                "
+                "Task status converted via alias: raw=%s normalized=%s context=%s",
                 value,
                 mapped,
                 context,
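`_normalize_status_token` lower-cases the input, maps legacy aliases onto canonical statuses, falls back to the first canonical status for empty input, and logs (rather than rejects) unknown values. A sketch with made-up alias and status tables; the real `TASK_STATUSES` and `STATUS_ALIASES` are imported from elsewhere in the package:

```python
from typing import Optional

TASK_STATUSES = ("todo", "in_progress", "done")                  # assumed values
STATUS_ALIASES = {"doing": "in_progress", "wip": "in_progress"}  # assumed values

def normalize_status(value: Optional[str]) -> str:
    if not value:
        return TASK_STATUSES[0]  # empty input falls back to the default status
    token = str(value).strip().lower()
    mapped = STATUS_ALIASES.get(token, token)
    # the real method warns when `mapped` is unknown and logs an info
    # entry when an alias conversion took place
    return mapped

print(normalize_status("WIP"))   # -> in_progress
print(normalize_status(None))    # -> todo
print(normalize_status("Done"))  # -> done
```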
@@ -1074,7 +1029,7 @@ class TaskService:
         *,
         context: str,
     ) -> TaskRecord:
-        """
+        """Convert a sqlite row into a ``TaskRecord`` instance."""
 
         tags_raw = row["tags"] or "[]"
         try: