tb-order-sync 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,202 @@
1
+ """Gross profit calculation service.
2
+
3
+ Business rule:
4
+ 毛利 = 客户报价 - 运费 - 包装价格 - 产品价格
5
+ G = F - E - D - C
6
+
7
+ If any of C/D/E/F is not a valid number, write "数据异常" to G.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ from datetime import datetime
13
+ from typing import Any, Optional
14
+
15
+ from config.mappings import ColumnMapping, get_column_mapping
16
+ from config.settings import Settings, SyncMode, get_settings
17
+ from connectors.base import BaseSheetConnector, CellUpdate
18
+ from models.records import OrderRecord
19
+ from models.state_models import SyncState
20
+ from models.task_models import TaskName, TaskResult
21
+ from services.state_service import StateService
22
+ from utils.diff import row_fingerprint
23
+ from utils.logger import get_logger
24
+ from utils.parser import normalize_order_no, parse_number
25
+
26
logger = get_logger(__name__)


class GrossProfitService:
    """Calculate and write gross profit for every row in A table.

    Business rule (columns refer to the A-table mapping):
        毛利 = 客户报价 - 运费 - 包装价格 - 产品价格
        G = F - E - D - C

    If any of C/D/E/F fails numeric parsing, the configured data-error
    text is written to G instead and the offending fields are logged.
    """

    def __init__(
        self,
        connector: BaseSheetConnector,
        state_service: StateService,
        settings: Optional[Settings] = None,
        mapping: Optional[ColumnMapping] = None,
    ) -> None:
        # connector: spreadsheet backend used for reads and batched writes.
        self._conn = connector
        # state_service: persists per-row fingerprints between runs so
        # incremental mode can skip unchanged rows.
        self._state_svc = state_service
        self._settings = settings or get_settings()
        self._map = mapping or get_column_mapping()

    def run(
        self,
        mode: Optional[SyncMode] = None,
        dry_run: Optional[bool] = None,
    ) -> TaskResult:
        """Execute gross profit calculation.

        Args:
            mode: Optional override; falls back to settings.gross_profit_mode.
            dry_run: When True, compute but write nothing; falls back to
                settings.dry_run when None.

        Returns:
            TaskResult carrying read/changed/error counters.  Exceptions are
            caught at this boundary and reported via the result, not raised.
        """
        mode = mode or self._settings.gross_profit_mode
        # Explicit None check so a caller-supplied dry_run=False still
        # overrides a settings-level dry_run=True.
        dry_run = dry_run if dry_run is not None else self._settings.dry_run
        result = TaskResult(task_name=TaskName.GROSS_PROFIT, mode=mode, dry_run=dry_run)

        logger.info("=== Gross Profit Service START (mode=%s, dry_run=%s) ===", mode.value, dry_run)

        try:
            state = self._state_svc.load()
            rows = self._read_a_table()
            result.rows_read = len(rows)
            if not rows:
                logger.warning("A table returned 0 data rows")
                result.finish()
                return result

            records = self._parse_rows(rows)
            updates, changed, errors = self._compute(records, state, mode)

            result.rows_changed = changed
            result.rows_error = errors

            # Fingerprint state is only persisted after a real write, so a
            # dry run leaves the incremental baseline untouched.
            if updates and not dry_run:
                self._write_updates(updates)
                state.last_run_at = datetime.now()
                self._state_svc.save(state)

            if dry_run:
                logger.info("[DRY-RUN] Would write %d cells, %d changed, %d errors", len(updates), changed, errors)

            result.finish()
            logger.info(
                "=== Gross Profit Service END — read=%d changed=%d errors=%d ===",
                result.rows_read, result.rows_changed, result.rows_error,
            )
        except Exception as exc:
            # Top-level task boundary: log with traceback, surface via result.
            logger.exception("Gross Profit Service failed")
            result.finish(success=False, error_message=str(exc))

        return result

    # ── Internal ───────────────────────────────────────────────────────────

    def _read_a_table(self) -> list[list[Any]]:
        """Read all rows of the A table and drop the header row."""
        rows = self._conn.read_rows(
            self._settings.tencent_a_file_id,
            self._settings.tencent_a_sheet_id,
        )
        # Skip header (row 0)
        return rows[1:] if rows else []

    def _parse_rows(self, rows: list[list[Any]]) -> list[OrderRecord]:
        """Convert raw sheet rows into OrderRecord objects.

        Row indices are 1-based because row 0 (the header) was already
        stripped by _read_a_table; the stored index therefore matches the
        sheet's data-row position used by CellUpdate.
        """
        m = self._map
        records: list[OrderRecord] = []
        for idx, row in enumerate(rows):
            row_num = idx + 1  # 1-based data row (0 is header)
            records.append(OrderRecord(
                row_index=row_num,
                order_no=normalize_order_no(self._safe_get(row, m.a_order_no)),
                product_price=self._safe_get_str(row, m.a_product_price),
                packaging_price=self._safe_get_str(row, m.a_packaging_price),
                freight=self._safe_get_str(row, m.a_freight),
                customer_quote=self._safe_get_str(row, m.a_customer_quote),
                gross_profit=self._safe_get_str(row, m.a_gross_profit),
                refund_status=self._safe_get_str(row, m.a_refund_status),
                raw_data=row,
            ))
        return records

    def _compute(
        self,
        records: list[OrderRecord],
        state: SyncState,
        mode: SyncMode,
    ) -> tuple[list[CellUpdate], int, int]:
        """Compute gross profit for each record, return (updates, changed_count, error_count)."""
        m = self._map
        updates: list[CellUpdate] = []
        changed = 0
        errors = 0

        for rec in records:
            # Fingerprint for incremental check: C, D, E, F, H
            fp = row_fingerprint([
                rec.product_price, rec.packaging_price,
                rec.freight, rec.customer_quote, rec.order_no,
            ])

            if mode == SyncMode.INCREMENTAL:
                old_fp = state.a_table_fingerprints.get(str(rec.row_index), "")
                if old_fp == fp:
                    continue  # No change

            # Parse numbers
            c = parse_number(rec.product_price)
            d = parse_number(rec.packaging_price)
            e = parse_number(rec.freight)
            f = parse_number(rec.customer_quote)

            if any(v is None for v in (c, d, e, f)):
                # Data error: any unparsable input means the configured
                # error text replaces the gross-profit value.
                new_val = self._settings.data_error_text
                errors += 1
                self._log_data_error(rec, c, d, e, f)
            else:
                gross = f - e - d - c  # type: ignore[operator]
                new_val = round(gross, 2)

            # Check if value actually changed.
            # NOTE(review): this compares the float's repr against the
            # sheet's raw string (e.g. "10.0" vs "10"), so a numerically
            # unchanged cell can still be re-written — confirm acceptable.
            if str(new_val) != str(rec.gross_profit or ""):
                updates.append(CellUpdate(row=rec.row_index, col=m.a_gross_profit, value=new_val))
                changed += 1

            # Update fingerprint (persisted by run() only after a real write)
            state.a_table_fingerprints[str(rec.row_index)] = fp

        return updates, changed, errors

    def _write_updates(self, updates: list[CellUpdate]) -> None:
        """Write computed G-column values back to the A table in batches."""
        self._conn.batch_update(
            self._settings.tencent_a_file_id,
            self._settings.tencent_a_sheet_id,
            updates,
            batch_size=self._settings.write_batch_size,
        )

    def _log_data_error(
        self, rec: OrderRecord,
        c: Optional[float], d: Optional[float],
        e: Optional[float], f: Optional[float],
    ) -> None:
        """Log which of the four input fields failed numeric parsing."""
        bad_fields = []
        labels = [
            ("产品价格(C)", rec.product_price, c),
            ("包装价格(D)", rec.packaging_price, d),
            ("运费(E)", rec.freight, e),
            ("客户报价(F)", rec.customer_quote, f),
        ]
        for name, raw, parsed in labels:
            if parsed is None:
                bad_fields.append(f"{name}='{raw}'")
        logger.warning(
            "Row %d (单号=%s) 数据异常: %s",
            rec.row_index, rec.order_no, ", ".join(bad_fields),
        )

    @staticmethod
    def _safe_get(row: list[Any], idx: int) -> Any:
        """Return row[idx], or None when the row is shorter than idx+1."""
        return row[idx] if idx < len(row) else None

    @staticmethod
    def _safe_get_str(row: list[Any], idx: int) -> Optional[str]:
        """Return row[idx] stringified, or None for missing/None cells."""
        val = row[idx] if idx < len(row) else None
        return str(val) if val is not None else None
@@ -0,0 +1,196 @@
1
+ """Refund matching service.
2
+
3
+ Business rules:
4
+ - Read B table column A (order numbers) to build refund set
5
+ - Scan A table column H (order numbers)
6
+ - If A row's order_no is in refund set → write "进入退款流程" to A table I column
7
+ - If not in refund set → clear I column
8
+ - Optional: set/clear row background color
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ from datetime import datetime
14
+ from typing import Any, Optional
15
+
16
+ from config.mappings import ColumnMapping, get_column_mapping
17
+ from config.settings import Settings, SyncMode, get_settings
18
+ from connectors.base import BaseSheetConnector, CellUpdate
19
+ from models.task_models import TaskName, TaskResult
20
+ from services.state_service import StateService
21
+ from utils.diff import row_fingerprint, set_hash
22
+ from utils.logger import get_logger
23
+ from utils.parser import normalize_order_no
24
+
25
logger = get_logger(__name__)

# Background colors for optional style update
_BG_RED = "#FF4D4F"  # highlight for rows that entered the refund flow
_BG_DEFAULT = None  # None means reset / no color


class RefundMatchService:
    """Match refund orders from B table against A table and update refund status.

    Business rules:
      - Read B table column A (order numbers) to build the refund set.
      - Scan A table column H (order numbers).
      - Order in refund set  -> write the configured refund text to column I.
      - Order not in the set -> clear column I.
      - Optionally set/clear the row background color.
    """

    def __init__(
        self,
        connector: BaseSheetConnector,
        state_service: StateService,
        settings: Optional[Settings] = None,
        mapping: Optional[ColumnMapping] = None,
    ) -> None:
        # connector: spreadsheet backend for reads, batched writes and styles.
        self._conn = connector
        # state_service: persists the refund-set hash for incremental runs.
        self._state_svc = state_service
        self._settings = settings or get_settings()
        self._map = mapping or get_column_mapping()

    def run(
        self,
        mode: Optional[SyncMode] = None,
        dry_run: Optional[bool] = None,
    ) -> TaskResult:
        """Execute refund matching.

        Args:
            mode: Optional override; falls back to settings.refund_match_mode.
            dry_run: When True, compute but write nothing; falls back to
                settings.dry_run when None.

        Returns:
            TaskResult carrying read/changed counters.  Exceptions are caught
            at this boundary and reported via the result, not raised.
        """
        mode = mode or self._settings.refund_match_mode
        # Explicit None check so dry_run=False can override settings.
        dry_run = dry_run if dry_run is not None else self._settings.dry_run
        result = TaskResult(task_name=TaskName.REFUND_MATCH, mode=mode, dry_run=dry_run)

        logger.info("=== Refund Match Service START (mode=%s, dry_run=%s) ===", mode.value, dry_run)

        try:
            state = self._state_svc.load()

            # 1. Build refund set from B table
            refund_set = self._build_refund_set()
            new_refund_hash = set_hash(list(refund_set))
            logger.info("B table refund set: %d order numbers, hash=%s", len(refund_set), new_refund_hash[:8])

            # 2. Read A table
            a_rows = self._read_a_table()
            result.rows_read = len(a_rows)

            # 3. Incremental short-circuit: if refund set unchanged and mode=incremental.
            # NOTE(review): this skips even when A-table rows changed — assumes
            # only refund-set changes require re-matching; confirm intended.
            if mode == SyncMode.INCREMENTAL and new_refund_hash == state.b_table_refund_hash:
                logger.info("Refund set unchanged, skipping (incremental mode)")
                result.finish()
                return result

            # 4. Match and compute updates
            updates, style_ops, changed = self._match(a_rows, refund_set, state, mode)
            result.rows_changed = changed

            # 5. Write (state baseline is persisted only after a real write)
            if not dry_run:
                if updates:
                    self._conn.batch_update(
                        self._settings.tencent_a_file_id,
                        self._settings.tencent_a_sheet_id,
                        updates,
                        batch_size=self._settings.write_batch_size,
                    )
                if self._settings.enable_style_update and style_ops:
                    self._apply_styles(style_ops)

                state.b_table_refund_hash = new_refund_hash
                state.b_table_refund_set = sorted(refund_set)
                state.last_run_at = datetime.now()
                self._state_svc.save(state)
            else:
                logger.info("[DRY-RUN] Would write %d cells, %d style ops, %d changed", len(updates), len(style_ops), changed)

            result.finish()
            logger.info(
                "=== Refund Match Service END — read=%d changed=%d ===",
                result.rows_read, result.rows_changed,
            )
        except Exception as exc:
            # Top-level task boundary: log with traceback, surface via result.
            logger.exception("Refund Match Service failed")
            result.finish(success=False, error_message=str(exc))

        return result

    # ── Internal ───────────────────────────────────────────────────────────

    def _build_refund_set(self) -> set[str]:
        """Read B table and return the set of normalized refund order numbers.

        Empty/unnormalizable cells are dropped so blank rows in the B table
        never match anything.
        """
        rows = self._conn.read_rows(
            self._settings.tencent_b_file_id,
            self._settings.tencent_b_sheet_id,
        )
        # Skip header
        data_rows = rows[1:] if rows else []
        refund_set: set[str] = set()
        col = self._map.b_order_no
        for row in data_rows:
            val = row[col] if col < len(row) else None
            order_no = normalize_order_no(val)
            if order_no:
                refund_set.add(order_no)
        return refund_set

    def _read_a_table(self) -> list[list[Any]]:
        """Read all rows of the A table and drop the header row."""
        rows = self._conn.read_rows(
            self._settings.tencent_a_file_id,
            self._settings.tencent_a_sheet_id,
        )
        return rows[1:] if rows else []

    def _match(
        self,
        a_rows: list[list[Any]],
        refund_set: set[str],
        state: SyncState,
        mode: SyncMode,
    ) -> tuple[list[CellUpdate], list[tuple[int, Optional[str]]], int]:
        """Compare each A-table row's order_no against refund_set.

        Note: `state` and `mode` are currently unused here (the incremental
        short-circuit lives in run()); they are kept for interface stability.

        Returns: (cell_updates, style_operations, changed_count)
        """
        m = self._map
        updates: list[CellUpdate] = []
        style_ops: list[tuple[int, Optional[str]]] = []
        changed = 0
        refund_text = self._settings.refund_status_text

        for idx, row in enumerate(a_rows):
            row_num = idx + 1  # 1-based (0 is header)
            order_no = normalize_order_no(row[m.a_order_no] if m.a_order_no < len(row) else None)

            if not order_no:
                continue

            current_status = str(row[m.a_refund_status]).strip() if m.a_refund_status < len(row) and row[m.a_refund_status] is not None else ""
            is_refund = order_no in refund_set

            if is_refund:
                desired = refund_text
                desired_color = _BG_RED
            else:
                desired = ""
                desired_color = _BG_DEFAULT

            if current_status == desired:
                continue  # No change needed

            # Incremental: if mode is incremental, still process because refund set changed
            # (we already short-circuited above if hash unchanged)

            updates.append(CellUpdate(row=row_num, col=m.a_refund_status, value=desired))
            style_ops.append((row_num, desired_color))
            changed += 1

            if is_refund:
                logger.info("Row %d (单号=%s): 标记退款", row_num, order_no)
            else:
                logger.info("Row %d (单号=%s): 取消退款标记", row_num, order_no)

        return updates, style_ops, changed

    def _apply_styles(self, ops: list[tuple[int, Optional[str]]]) -> None:
        """Apply (or clear, when color is None) row background colors.

        Failures are logged and skipped per row so a single bad row cannot
        abort the remaining style updates.
        """
        for row_idx, color in ops:
            try:
                self._conn.update_row_style(
                    self._settings.tencent_a_file_id,
                    self._settings.tencent_a_sheet_id,
                    row_idx,
                    bg_color=color,
                )
            except Exception as exc:
                logger.warning("Failed to update style for row %d: %s", row_idx, exc)
@@ -0,0 +1,76 @@
1
+ """APScheduler-based task scheduler."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import random
6
+ import time
7
+ from typing import Optional
8
+
9
+ from apscheduler.schedulers.blocking import BlockingScheduler
10
+ from apscheduler.triggers.interval import IntervalTrigger
11
+
12
+ from config.settings import Settings, SyncMode, get_settings
13
+ from connectors.base import BaseSheetConnector
14
+ from services.gross_profit_service import GrossProfitService
15
+ from services.refund_match_service import RefundMatchService
16
+ from services.state_service import StateService
17
+ from utils.logger import get_logger
18
+
19
logger = get_logger(__name__)


class SchedulerService:
    """Manages periodic execution of sync tasks."""

    def __init__(
        self,
        connector: BaseSheetConnector,
        state_service: StateService,
        settings: Optional[Settings] = None,
    ) -> None:
        self._connector = connector
        self._state_svc = state_service
        self._settings = settings or get_settings()
        self._scheduler = BlockingScheduler()

        # Both task services share the same connector, state store and
        # resolved settings instance.
        shared = (connector, state_service, self._settings)
        self._gp_svc = GrossProfitService(*shared)
        self._rm_svc = RefundMatchService(*shared)

    def _run_all(self) -> None:
        """Run every sync task once, in fixed order."""
        logger.info("--- Scheduled run: all tasks ---")
        for service in (self._gp_svc, self._rm_svc):
            service.run()

    def _sleep_startup_jitter(self) -> None:
        """Optionally delay startup by a random amount (thundering-herd guard)."""
        upper = self._settings.startup_jitter_seconds
        if upper > 0:
            pause = random.uniform(0, upper)
            logger.info("Startup jitter: sleeping %.1f seconds", pause)
            time.sleep(pause)

    def start(self) -> None:
        """Start the blocking scheduler with configured interval and jitter."""
        self._sleep_startup_jitter()

        every_minutes = self._settings.task_interval_minutes
        self._scheduler.add_job(
            self._run_all,
            trigger=IntervalTrigger(minutes=every_minutes),
            id="sync_all",
            name="Sync All Tasks",
            max_instances=1,
            coalesce=True,
        )
        logger.info("Scheduler started: interval=%d minutes", every_minutes)

        # One synchronous pass before handing control to APScheduler.
        self._run_all()

        try:
            self._scheduler.start()
        except (KeyboardInterrupt, SystemExit):
            logger.info("Scheduler stopped by user")
            self._scheduler.shutdown(wait=False)

    def stop(self) -> None:
        """Shut down the scheduler if it is currently running."""
        if not self._scheduler.running:
            return
        self._scheduler.shutdown(wait=False)
        logger.info("Scheduler stopped")
@@ -0,0 +1,50 @@
1
+ """Local JSON-based state persistence for incremental sync."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ from pathlib import Path
7
+
8
+ from models.state_models import SyncState
9
+ from utils.logger import get_logger
10
+
11
logger = get_logger(__name__)

STATE_FILENAME = "sync_state.json"


class StateService:
    """Read / write incremental sync state from a local JSON file."""

    def __init__(self, state_dir: str) -> None:
        """Create the state directory if needed and resolve the state path."""
        self._dir = Path(state_dir)
        self._dir.mkdir(parents=True, exist_ok=True)
        self._path = self._dir / STATE_FILENAME

    def load(self, *, quiet: bool = False) -> SyncState:
        """Load state from disk. Returns fresh state if file missing / corrupt.

        Args:
            quiet: Suppress info/warning logging (useful for status probes).
        """
        if not self._path.exists():
            if not quiet:
                logger.info("No existing state file, starting fresh: %s", self._path)
            return SyncState()
        try:
            raw = self._path.read_text(encoding="utf-8")
            data = json.loads(raw)
            state = SyncState.model_validate(data)
            if not quiet:
                logger.info("Loaded sync state from %s", self._path)
            return state
        except Exception as exc:
            # Corrupt or schema-incompatible state is non-fatal: fall back to
            # a clean baseline so the next run performs a full sync.
            if not quiet:
                logger.warning("Failed to load state (%s), starting fresh: %s", exc, self._path)
            return SyncState()

    def save(self, state: SyncState) -> None:
        """Persist state to disk atomically.

        Fix: the previous implementation wrote directly to the target file,
        so a crash mid-write could leave a truncated sync_state.json that
        load() would then discard (losing the incremental baseline).  We now
        write to a temporary sibling and rename it over the target —
        Path.replace is atomic on both POSIX and Windows.

        Raises:
            Exception: re-raised after logging if serialization or I/O fails.
        """
        tmp_path = self._path.with_name(self._path.name + ".tmp")
        try:
            raw = state.model_dump_json(indent=2)
            tmp_path.write_text(raw, encoding="utf-8")
            tmp_path.replace(self._path)
            logger.info("Saved sync state to %s", self._path)
        except Exception as exc:
            logger.error("Failed to save state: %s", exc)
            raise
@@ -0,0 +1,93 @@
1
+ # -*- mode: python ; coding: utf-8 -*-
2
+ """PyInstaller spec file for 多表格同步服务.
3
+
4
+ Build:
5
+ pyinstaller sync_service.spec
6
+
7
+ Output:
8
+ dist/sync_service/sync_service.exe (Windows)
9
+ dist/sync_service/sync_service (macOS/Linux)
10
+ """
11
+
12
+ import sys
13
+ from pathlib import Path
14
+
15
block_cipher = None  # Legacy PyInstaller option; bytecode encryption is not used.
root = Path(SPECPATH)  # SPECPATH is a global PyInstaller injects at build time.

a = Analysis(
    [str(root / 'main.py')],  # entry script
    pathex=[str(root)],
    binaries=[],
    datas=[
        # Bundle .env.example so first-run setup can use it as template
        (str(root / '.env.example'), '.'),
    ],
    # Package modules listed explicitly because PyInstaller's static analysis
    # can miss imports that are resolved dynamically at runtime.
    hiddenimports=[
        'config',
        'config.settings',
        'config.mappings',
        'connectors',
        'connectors.base',
        'connectors.tencent_docs',
        'connectors.feishu_sheets',
        'models',
        'models.records',
        'models.task_models',
        'models.state_models',
        'services',
        'services.gross_profit_service',
        'services.refund_match_service',
        'services.c_to_a_sync_service',
        'services.scheduler_service',
        'services.state_service',
        'utils',
        'utils.logger',
        'utils.parser',
        'utils.diff',
        'utils.retry',
        'cli',
        'cli.commands',
        'cli.setup',
        # Pydantic v2 needs these
        'pydantic',
        'pydantic_settings',
        'annotated_types',
        'dotenv',
    ],
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)

# Archive of pure-Python modules bundled into the executable.
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,  # one-folder build: binaries go into COLLECT below
    name='sync_service',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    console=True,  # Must be console app for interactive CLI
    icon=None,
)

# One-folder distribution: dist/sync_service/ containing exe, libs and datas.
coll = COLLECT(
    exe,
    a.binaries,
    a.zipfiles,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='sync_service',
)
File without changes
package/utils/diff.py ADDED
@@ -0,0 +1,27 @@
1
+ """Row fingerprinting and diff utilities for incremental sync."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import hashlib
6
+ from typing import Any, Sequence
7
+
8
+
9
def row_fingerprint(fields: Sequence[Any]) -> str:
    """Return an MD5 hex digest over the given cell values.

    Each value is normalized (None becomes "", everything else is
    stringified and stripped) and the pieces are joined with "|" before
    hashing, so whitespace-only edits do not change the fingerprint.
    Used to detect whether a row's key fields changed since the last sync.
    """
    normalized = ("" if cell is None else str(cell).strip() for cell in fields)
    return hashlib.md5("|".join(normalized).encode("utf-8")).hexdigest()
16
+
17
+
18
def set_hash(items: Sequence[str]) -> str:
    """Return an order-insensitive MD5 digest for a collection of strings.

    Items are sorted and newline-joined before hashing, so two collections
    with the same members always produce the same hash regardless of input
    order (e.g. refund order numbers read from different row layouts).
    """
    digest = hashlib.md5()
    digest.update("\n".join(sorted(items)).encode("utf-8"))
    return digest.hexdigest()
22
+
23
+
24
def _normalize(value: Any) -> str:
    """Canonicalize a cell value for hashing: None -> "", else stripped str."""
    return "" if value is None else str(value).strip()
@@ -0,0 +1,47 @@
1
+ """Centralized logging setup.
2
+
3
+ Usage:
4
+ from utils.logger import get_logger
5
+ logger = get_logger(__name__)
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import logging
11
+ import sys
12
+ from pathlib import Path
13
+
14
+ _INITIALIZED = False
15
+
16
+
17
+ def setup_logging(level: str = "INFO", log_dir: str | None = None) -> None:
18
+ """Configure root logger with console + optional file handler."""
19
+ global _INITIALIZED
20
+ if _INITIALIZED:
21
+ return
22
+ _INITIALIZED = True
23
+
24
+ root = logging.getLogger()
25
+ root.setLevel(getattr(logging, level.upper(), logging.INFO))
26
+
27
+ fmt = logging.Formatter(
28
+ fmt="%(asctime)s | %(levelname)-7s | %(name)s | %(message)s",
29
+ datefmt="%Y-%m-%d %H:%M:%S",
30
+ )
31
+
32
+ # Console handler
33
+ console = logging.StreamHandler(sys.stdout)
34
+ console.setFormatter(fmt)
35
+ root.addHandler(console)
36
+
37
+ # File handler (optional)
38
+ if log_dir:
39
+ log_path = Path(log_dir)
40
+ log_path.mkdir(parents=True, exist_ok=True)
41
+ fh = logging.FileHandler(log_path / "sync_service.log", encoding="utf-8")
42
+ fh.setFormatter(fmt)
43
+ root.addHandler(fh)
44
+
45
+
46
def get_logger(name: str) -> logging.Logger:
    """Return the logger registered under *name*.

    Thin wrapper over logging.getLogger so call sites depend on this
    module's setup conventions rather than on the logging package directly.
    """
    return logging.getLogger(name)