fustor-core 0.1.2.post3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fustor_core-0.1.2.post3/PKG-INFO +12 -0
- fustor_core-0.1.2.post3/README.md +22 -0
- fustor_core-0.1.2.post3/pyproject.toml +30 -0
- fustor_core-0.1.2.post3/setup.cfg +4 -0
- fustor_core-0.1.2.post3/src/fustor_core/__init__.py +0 -0
- fustor_core-0.1.2.post3/src/fustor_core/drivers.py +216 -0
- fustor_core-0.1.2.post3/src/fustor_core/exceptions.py +42 -0
- fustor_core-0.1.2.post3/src/fustor_core/models/__init__.py +0 -0
- fustor_core-0.1.2.post3/src/fustor_core/models/config.py +206 -0
- fustor_core-0.1.2.post3/src/fustor_core/models/log.py +11 -0
- fustor_core-0.1.2.post3/src/fustor_core/models/states.py +38 -0
- fustor_core-0.1.2.post3/src/fustor_core/utils/retry.py +36 -0
- fustor_core-0.1.2.post3/src/fustor_core.egg-info/PKG-INFO +12 -0
- fustor_core-0.1.2.post3/src/fustor_core.egg-info/SOURCES.txt +18 -0
- fustor_core-0.1.2.post3/src/fustor_core.egg-info/dependency_links.txt +1 -0
- fustor_core-0.1.2.post3/src/fustor_core.egg-info/requires.txt +7 -0
- fustor_core-0.1.2.post3/src/fustor_core.egg-info/top_level.txt +1 -0
- fustor_core-0.1.2.post3/tests/models/test_config.py +196 -0
- fustor_core-0.1.2.post3/tests/models/test_event.py +26 -0
- fustor_core-0.1.2.post3/tests/models/test_states.py +72 -0
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: fustor-core
|
|
3
|
+
Version: 0.1.2.post3
|
|
4
|
+
Summary: Core components for Fustor services and plugins
|
|
5
|
+
Author-email: Huajin Wang <wanghuajin999@163.com>
|
|
6
|
+
Requires-Python: >=3.11
|
|
7
|
+
Requires-Dist: pydantic>=2.11.7
|
|
8
|
+
Provides-Extra: dev
|
|
9
|
+
Requires-Dist: pytest>=8.0.0; extra == "dev"
|
|
10
|
+
Requires-Dist: ruff>=0.1.0; extra == "dev"
|
|
11
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
12
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == "dev"
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# fustor-core
|
|
2
|
+
|
|
3
|
+
This package contains core components, abstractions, and utilities shared across various Fustor services and plugins within the monorepo. It provides foundational elements such as base classes for drivers, common exceptions, data models, and utility functions.
|
|
4
|
+
|
|
5
|
+
## Contents
|
|
6
|
+
|
|
7
|
+
* `drivers.py`: Defines abstract base classes (ABCs) for `SourceDriver` and `PusherDriver`, establishing the contract for all data source and data pusher implementations.
|
|
8
|
+
* `exceptions.py`: Contains custom exception classes used throughout the Fustor ecosystem for consistent error handling.
|
|
9
|
+
* `models/`: Houses Pydantic models for various data structures, ensuring data validation and serialization.
|
|
10
|
+
* `utils/`: Provides general utility functions that are commonly used by different Fustor components.
|
|
11
|
+
|
|
12
|
+
## Installation
|
|
13
|
+
|
|
14
|
+
This package is part of the Fustor monorepo and is typically installed in editable mode within the monorepo's development environment using `uv sync`.
|
|
15
|
+
|
|
16
|
+
## Usage
|
|
17
|
+
|
|
18
|
+
Components from `fustor-core` are imported and utilized by other Fustor services (e.g., `agent`, `registry`, `fusion`) and plugin packages (e.g., `source_mysql`, `pusher_fusion`) to ensure consistency and reusability.
|
|
19
|
+
|
|
20
|
+
## Note on Package Structure
|
|
21
|
+
|
|
22
|
+
It has been observed that the `__init__.py` file is missing from the `fustor_core` package directory (`packages/core/src/fustor_core/`). While the package's contents are still accessible, this is an unconventional Python package structure and might lead to issues with package discovery or imports in certain environments. It is recommended to add an empty `__init__.py` file to this directory.
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "fustor-core"
|
|
3
|
+
dynamic = ["version"]
|
|
4
|
+
description = "Core components for Fustor services and plugins"
|
|
5
|
+
requires-python = ">=3.11"
|
|
6
|
+
dependencies = [ "pydantic>=2.11.7",]
|
|
7
|
+
[[project.authors]]
|
|
8
|
+
name = "Huajin Wang"
|
|
9
|
+
email = "wanghuajin999@163.com"
|
|
10
|
+
|
|
11
|
+
[build-system]
|
|
12
|
+
requires = [ "setuptools>=61.0", "setuptools-scm>=8.0"]
|
|
13
|
+
build-backend = "setuptools.build_meta"
|
|
14
|
+
|
|
15
|
+
[tool.setuptools_scm]
|
|
16
|
+
root = "../.."
|
|
17
|
+
version_scheme = "post-release"
|
|
18
|
+
local_scheme = "dirty-tag"
|
|
19
|
+
|
|
20
|
+
["project.urls"]
|
|
21
|
+
Homepage = "https://github.com/excelwang/fustor/tree/master/packages/core"
|
|
22
|
+
"Bug Tracker" = "https://github.com/excelwang/fustor/issues"
|
|
23
|
+
|
|
24
|
+
license = "MIT"
|
|
25
|
+
|
|
26
|
+
[project.optional-dependencies]
|
|
27
|
+
dev = [ "pytest>=8.0.0", "ruff>=0.1.0", "mypy>=1.0.0", "pytest-asyncio>=0.23.0",]
|
|
28
|
+
|
|
29
|
+
[tool.setuptools.packages.find]
|
|
30
|
+
where = [ "src",]
|
|
File without changes
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Abstract Base Classes for Fuagent Drivers.
|
|
3
|
+
|
|
4
|
+
This module defines the formal interface for Source and Pusher drivers.
|
|
5
|
+
All drivers must inherit from the appropriate base class and implement its
|
|
6
|
+
abstract methods.
|
|
7
|
+
"""
|
|
8
|
+
from abc import ABC, abstractmethod
|
|
9
|
+
from typing import (
|
|
10
|
+
Any,
|
|
11
|
+
Dict,
|
|
12
|
+
Iterator,
|
|
13
|
+
List,
|
|
14
|
+
Tuple,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
# Forward-referencing models to avoid circular imports if drivers.py is imported by models
|
|
18
|
+
from fustor_event_model.models import EventBase # Import EventBase from fustor_event_model
|
|
19
|
+
from fustor_core.models.config import SourceConfig, PusherConfig
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class PusherDriver(ABC):
    """
    Abstract Base Class for all Pusher drivers.

    Defines the contract for drivers that receive data from the Fuagent core
    and deliver it to an external endpoint. These drivers are expected to be
    asynchronous.
    """

    def __init__(self, id: str, config: PusherConfig):
        """
        Initializes the driver with its specific configuration.

        Args:
            id: Unique identifier of this pusher instance.
            config: Pusher configuration (endpoint, credential, batching, retry policy).
        """
        self.id = id
        self.config = config

    @abstractmethod
    async def push(self, events: List[EventBase], **kwargs) -> Dict:
        """
        Receives and processes a list of events. This is the primary data-writing method.

        Returns:
            A dictionary with status information from the endpoint.
        """
        raise NotImplementedError

    async def get_latest_committed_index(self, **kwargs) -> int:
        """
        Optional: Gets the last successfully processed index from the pusher endpoint.
        Used for resumable syncs. A return value of -1 indicates starting from the
        beginning (this default implementation always returns -1).
        """
        return -1

    @abstractmethod
    async def heartbeat(self, **kwargs) -> Dict:
        """
        Sends a heartbeat to maintain session state with the pusher endpoint.
        The `kwargs` will contain `agent_id`, `task_id`, and `session_id`.
        Returns a dictionary with status information.
        """
        raise NotImplementedError

    @abstractmethod
    async def create_session(self, task_id: str) -> str:
        """
        Creates a new session with the pusher endpoint for the given task.
        Returns the session ID string.
        """
        raise NotImplementedError

    async def close(self):
        """
        Optional: Gracefully closes any open resources, like network clients.
        The default implementation is a no-op.
        """
        pass

    @classmethod
    @abstractmethod
    async def get_needed_fields(cls, **kwargs) -> Dict:
        """
        Declares the data fields required by this pusher.
        Returns a JSON Schema dictionary. An empty dict means all fields are accepted.
        """
        raise NotImplementedError

    @classmethod
    async def test_connection(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Tests the connection to the pusher endpoint.
        Returns a (success, message) tuple; the default is a successful no-op.
        """
        return (True, "Connection test not implemented for this driver.")

    @classmethod
    async def check_privileges(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Checks if the provided credentials have sufficient privileges.
        Returns a (success, message) tuple; the default is a successful no-op.
        """
        return (True, "Privilege check not implemented for this driver.")

    @classmethod
    async def get_wizard_steps(cls) -> Dict[str, Any]:
        """
        Optional: Provides configuration wizard steps for UI integration.
        Returns a dictionary defining the steps; an empty dict means no wizard.
        """
        return {}
|
104
|
+
|
|
105
|
+
class SourceDriver(ABC):
    """
    Abstract Base Class for all Source drivers.

    Defines the contract for drivers that produce data for the Fuagent core.
    Note the mix of synchronous and asynchronous methods, reflecting the current
    design of existing drivers (e.g., threading-based fs vs. async network drivers).
    """

    def __init__(self, id: str, config: SourceConfig):
        """
        Initializes the driver with its specific configuration.

        Args:
            id: Unique identifier of this source instance.
            config: Source configuration (uri, credential, retry policy).
        """
        self.id = id
        self.config = config

    @property
    def is_transient(self) -> bool:
        """
        Indicates whether this source driver is transient.
        Transient sources lose events if not processed immediately.
        Defaults to False. Drivers that are transient should override this property.
        """
        return False

    @abstractmethod
    def get_snapshot_iterator(self, **kwargs) -> Iterator[EventBase]:
        """
        Performs a one-time, full snapshot of the source data.
        This method returns an iterator that yields new events.
        """
        raise NotImplementedError

    def is_position_available(self, position: int) -> bool:
        """
        Checks if the driver can resume from a specific position.
        For transient sources, this should return False since they don't keep historical events.
        Defaults to True for non-transient sources, but drivers should override this method
        to provide accurate information about position availability.
        """
        if position <= 0: # non-positive means "start from the latest snapshot" -- not resumable
            return False
        return not self.is_transient

    @abstractmethod
    def get_message_iterator(self, start_position: int = -1, **kwargs) -> Iterator[EventBase]:
        """
        Performs incremental data capture (CDC).

        This method returns an iterator that yields new events.
        Optionally, a start_position can be provided to resume from a specific point.
        Use is_position_available() to check if a position can be resumed from.

        Args:
            start_position (int): The position to start from, or -1 for latest position
            **kwargs: Additional implementation-specific parameters

        Returns:
            Iterator[EventBase]: An iterator that yields new events.
        """
        raise NotImplementedError

    async def close(self):
        """
        Optional: Gracefully closes any open resources, like database connections or file handles.
        The default implementation is a no-op.
        """
        pass

    @classmethod
    @abstractmethod
    async def get_available_fields(cls, **kwargs) -> Dict:
        """
        Declares the data fields that this source can provide.
        Returns a JSON Schema dictionary.
        """
        raise NotImplementedError

    @classmethod
    async def test_connection(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Tests the connection to the source service.
        Returns a (success, message) tuple; the default is a successful no-op.
        """
        return (True, "Connection test not implemented for this driver.")

    @classmethod
    async def check_privileges(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Checks if the provided credentials have sufficient privileges.
        Returns a (success, message) tuple; the default is a successful no-op.
        """
        return (True, "Privilege check not implemented for this driver.")

    @classmethod
    async def check_runtime_params(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Checks if the runtime parameters of the underlying source system
        are adequate for generating events.
        """
        return (True, "Runtime parameter check not implemented for this driver.")

    @classmethod
    async def create_agent_user(cls, **kwargs) -> Tuple[bool, str]:
        """
        Optional: Creates an agent user for the source service.
        """
        return (True, "Agent user creation not implemented for this driver.")

    @classmethod
    async def get_wizard_steps(cls) -> Dict[str, Any]:
        """
        Optional: Provides configuration wizard steps for UI integration.
        Returns a dictionary defining the steps; an empty dict means no wizard.
        """
        return {}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from fastapi import HTTPException, status
|
|
2
|
+
|
|
3
|
+
from typing import Optional, Any, Dict
|
|
4
|
+
|
|
5
|
+
class fustor_agentException(HTTPException):
    """Base exception for fustor_agent, mapping to HTTP exceptions.

    All domain errors in this module derive from this class so they can be
    surfaced directly as HTTP responses.
    """

    def __init__(self, status_code: int, detail: Any = None, headers: Optional[Dict[str, Any]] = None):
        super().__init__(status_code, detail=detail, headers=headers)
|
|
9
|
+
|
|
10
|
+
class ConfigError(fustor_agentException):
    """Raised when there's an issue with configuration (e.g., not found, invalid).

    Maps to HTTP 400 (Bad Request).
    """

    def __init__(self, detail: str = "Configuration error", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_400_BAD_REQUEST, detail=detail, headers=headers)
|
|
14
|
+
|
|
15
|
+
class NotFoundError(fustor_agentException):
    """Raised when a requested resource (e.g., config, instance) is not found.

    Maps to HTTP 404 (Not Found).
    """

    def __init__(self, detail: str = "Resource not found", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_404_NOT_FOUND, detail=detail, headers=headers)
|
|
19
|
+
|
|
20
|
+
class ConflictError(fustor_agentException):
    """Raised when a resource already exists and cannot be created again.

    Maps to HTTP 409 (Conflict).
    """

    def __init__(self, detail: str = "Resource already exists", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_409_CONFLICT, detail=detail, headers=headers)
|
|
24
|
+
|
|
25
|
+
class DriverError(fustor_agentException):
    """Raised when there's an issue with a driver (e.g., connection, invalid parameters).

    Maps to HTTP 500 (Internal Server Error).
    """

    def __init__(self, detail: str = "Driver error", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_500_INTERNAL_SERVER_ERROR, detail=detail, headers=headers)
|
|
29
|
+
|
|
30
|
+
class StateConflictError(fustor_agentException):
    """Raised when an operation is attempted in an invalid state.

    Maps to HTTP 409 (Conflict).
    """

    def __init__(self, detail: str = "Operation not allowed in current state", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_409_CONFLICT, detail=detail, headers=headers)
|
|
34
|
+
|
|
35
|
+
class ValidationError(fustor_agentException):
    """Raised when input validation fails.

    Maps to HTTP 422 (Unprocessable Entity).
    """

    def __init__(self, detail: str = "Validation error", headers: Optional[Dict[str, Any]] = None):
        super().__init__(status.HTTP_422_UNPROCESSABLE_ENTITY, detail=detail, headers=headers)
|
|
39
|
+
|
|
40
|
+
class TransientSourceBufferFullError(Exception):
    """Raised by MemoryEventBus when its buffer is full and the source is transient.

    Derives from plain Exception (not the HTTP-mapped hierarchy): it signals
    internal backpressure rather than a client-facing error.
    """
|
|
File without changes
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field, field_validator, RootModel, ConfigDict
|
|
2
|
+
from typing import List, Optional, Union, TypeAlias, Dict, Any
|
|
3
|
+
from fustor_core.exceptions import ConfigError, NotFoundError
|
|
4
|
+
|
|
5
|
+
class PasswdCredential(BaseModel):
    """Username/password credential (HTTP Basic style)."""

    # Extra fields are rejected so this model is never chosen by accident
    # when the other Union member should match.
    model_config = ConfigDict(extra='forbid')

    user: str = Field(..., description="用户名")
    passwd: Optional[str] = Field(None, description="密码")

    def to_base64(self) -> str:
        """Return the Base64-encoded "user:passwd" string for HTTP Basic Auth."""
        import base64
        raw = f"{self.user}:{self.passwd or ''}".encode('utf-8')
        return base64.b64encode(raw).decode('utf-8')

    def _get_hashable_data(self):
        # Type tag keeps hashes distinct from other credential classes.
        return ("PasswdCredential", self.user, self.passwd)

    def __eq__(self, other):
        if isinstance(other, PasswdCredential):
            return self._get_hashable_data() == other._get_hashable_data()
        return NotImplemented

    def __hash__(self):
        return hash(self._get_hashable_data())
|
|
29
|
+
|
|
30
|
+
class ApiKeyCredential(BaseModel):
    """API-key credential, optionally associated with a username."""

    # Extra fields are rejected so this model is never chosen by accident
    # when the other Union member should match.
    model_config = ConfigDict(extra='forbid')

    user: Optional[str] = Field(None, description="用户名")
    key: str = Field(..., description="api key")

    def _get_hashable_data(self):
        # Type tag keeps hashes distinct from other credential classes.
        return ("ApiKeyCredential", self.user, self.key)

    def __eq__(self, other):
        if isinstance(other, ApiKeyCredential):
            return self._get_hashable_data() == other._get_hashable_data()
        return NotImplemented

    def __hash__(self):
        return hash(self._get_hashable_data())
|
|
48
|
+
|
|
49
|
+
# Union of supported credential types. ApiKeyCredential is listed first so
# validation attempts the more specific model before PasswdCredential;
# both forbid extra fields, which keeps the two unambiguous.
Credential: TypeAlias = Union[ApiKeyCredential, PasswdCredential]
|
|
51
|
+
|
|
52
|
+
class FieldMapping(BaseModel):
    """Maps one or more source fields onto a single target field of a sync."""
    to: str = Field(..., description="供给字段")  # target (supplied) field name
    source: List[str] = Field(..., description="来源字段")  # contributing source field names
    required: bool = Field(default=False, description="是否为必填字段")  # whether the mapping is mandatory
|
|
56
|
+
|
|
57
|
+
class SourceConfig(BaseModel):
    """Configuration for a data source.

    The config's name/id is the key of the enclosing dict (see
    SourceConfigDict), so it is not stored on the model itself.
    """
    driver: str  # driver type name
    uri: str  # connection URI of the source system
    credential: Credential
    max_queue_size: int = Field(default=1000, gt=0, description="事件缓冲区的最大尺寸")
    max_retries: int = Field(default=10, gt=0, description="驱动在读取事件失败时的最大重试次数")
    retry_delay_sec: int = Field(default=5, gt=0, description="驱动重试前的等待秒数")
    disabled: bool = Field(default=True, description="是否禁用此配置")
    validation_error: Optional[str] = Field(None, exclude=True)  # transient; excluded from serialization
    driver_params: Dict[str, Any] = Field(default_factory=dict, description="驱动专属参数")
|
|
68
|
+
|
|
69
|
+
class PusherConfig(BaseModel):
    """Configuration for a pusher.

    The config's name/id is the key of the enclosing dict (see
    PusherConfigDict), so it is not stored on the model itself.
    """
    driver: str  # driver type name
    endpoint: str  # target endpoint URL/address
    credential: Credential
    batch_size: int = Field(default=100, description="单次推送事件的批处理大小")
    max_retries: int = Field(default=10, gt=0, description="推送失败时的最大重试次数")
    retry_delay_sec: int = Field(default=5, gt=0, description="推送重试前的等待秒数")
    disabled: bool = Field(default=True, description="是否禁用此配置")
    driver_params: Optional[Dict[str, Any]] = Field(default=None, description="驱动专属参数")

    @field_validator('batch_size')
    def batch_size_must_be_positive(cls, v):
        """Reject non-positive batch sizes.

        NOTE(review): raises ConfigError (an HTTPException subclass) rather
        than ValueError; the tests rely on ConfigError propagating unchanged.
        """
        if v <= 0:
            raise ConfigError('batch_size must be positive')
        return v
|
|
85
|
+
|
|
86
|
+
class SyncConfig(BaseModel):
    """Configuration of a sync task linking one source to one pusher."""
    source: str  # id of the source config this sync reads from
    pusher: str  # id of the pusher config this sync writes to
    disabled: bool = Field(default=True, description="是否禁用此同步任务")
    # NOTE: checkpoint_interval_events / enable_checkpoint were removed as
    # obsolete in the new architecture.
    fields_mapping: List[FieldMapping] = Field(default_factory=list)
|
|
96
|
+
|
|
97
|
+
class SourceConfigDict(RootModel[Dict[str, SourceConfig]]):
    """Mapping of source id -> SourceConfig (pydantic RootModel)."""
    root: Dict[str, SourceConfig] = Field(default_factory=dict)
|
|
99
|
+
|
|
100
|
+
class PusherConfigDict(RootModel[Dict[str, PusherConfig]]):
    """Mapping of pusher id -> PusherConfig (pydantic RootModel)."""
    root: Dict[str, PusherConfig] = Field(default_factory=dict)
|
|
102
|
+
|
|
103
|
+
class SyncConfigDict(RootModel[Dict[str, SyncConfig]]):
    """Mapping of sync id -> SyncConfig (pydantic RootModel)."""
    root: Dict[str, SyncConfig] = Field(default_factory=dict)
|
|
105
|
+
|
|
106
|
+
class AppConfig(BaseModel):
    """
    Top-level application configuration holding sources, pushers and syncs.

    Default factories allow the application to start with an empty but valid
    config if the config.yaml file is missing or empty.

    Consistency fix: all access goes through the get_*() accessors; previous
    code mixed accessors with direct self.*.root lookups.
    """
    sources: SourceConfigDict = Field(default_factory=SourceConfigDict)
    pushers: PusherConfigDict = Field(default_factory=PusherConfigDict)
    syncs: SyncConfigDict = Field(default_factory=SyncConfigDict)

    def get_sources(self) -> Dict[str, SourceConfig]:
        """Return the mutable mapping of source configs, keyed by id."""
        return self.sources.root

    def get_pushers(self) -> Dict[str, PusherConfig]:
        """Return the mutable mapping of pusher configs, keyed by id."""
        return self.pushers.root

    def get_syncs(self) -> Dict[str, SyncConfig]:
        """Return the mutable mapping of sync configs, keyed by id."""
        return self.syncs.root

    def get_source(self, id: str) -> Optional[SourceConfig]:
        """Return the source config for `id`, or None if absent."""
        return self.get_sources().get(id)

    def get_pusher(self, id: str) -> Optional[PusherConfig]:
        """Return the pusher config for `id`, or None if absent."""
        return self.get_pushers().get(id)

    def get_sync(self, id: str) -> Optional[SyncConfig]:
        """Return the sync config for `id`, or None if absent."""
        return self.get_syncs().get(id)

    def add_source(self, id: str, config: SourceConfig) -> SourceConfig:
        """Add a source config. Raises ConfigError if the id already exists."""
        if self.get_source(id) is not None:
            raise ConfigError(f"Source config with name '{id}' already exists.")
        self.get_sources()[id] = config
        return config

    def add_pusher(self, id: str, config: PusherConfig) -> PusherConfig:
        """Add a pusher config. Raises ConfigError if the id already exists."""
        if self.get_pusher(id) is not None:
            raise ConfigError(f"Pusher config with name '{id}' already exists.")
        self.get_pushers()[id] = config
        return config

    def add_sync(self, id: str, config: SyncConfig) -> SyncConfig:
        """
        Add a sync config after verifying its source/pusher dependencies exist.

        Raises:
            ConfigError: if a sync with this id already exists.
            NotFoundError: if the referenced source or pusher is missing.
        """
        if self.get_sync(id) is not None:
            raise ConfigError(f"Sync config with id '{id}' already exists.")

        # Dependency check: a sync is only valid if both ends exist.
        if not self.get_source(config.source):
            raise NotFoundError(f"Dependency source '{config.source}' not found.")
        if not self.get_pusher(config.pusher):
            raise NotFoundError(f"Dependency pusher '{config.pusher}' not found.")

        self.get_syncs()[id] = config
        return config

    def delete_source(self, id: str) -> SourceConfig:
        """Delete a source config and any syncs that depend on it; return it."""
        if self.get_source(id) is None:
            raise NotFoundError(f"Source config with id '{id}' not found.")

        # Delete dependent syncs first so no sync references a missing source.
        sync_ids_to_delete = [sync_id for sync_id, cfg in self.get_syncs().items() if cfg.source == id]
        for sync_id in sync_ids_to_delete:
            self.delete_sync(sync_id)

        return self.get_sources().pop(id)

    def delete_pusher(self, id: str) -> PusherConfig:
        """Delete a pusher config and any syncs that depend on it; return it."""
        if self.get_pusher(id) is None:
            raise NotFoundError(f"Pusher config with id '{id}' not found.")

        # Delete dependent syncs first (same pattern as delete_source).
        sync_ids_to_delete = [sync_id for sync_id, cfg in self.get_syncs().items() if cfg.pusher == id]
        for sync_id in sync_ids_to_delete:
            self.delete_sync(sync_id)

        return self.get_pushers().pop(id)

    def delete_sync(self, id: str) -> SyncConfig:
        """Delete a sync config and return it. Raises NotFoundError if absent."""
        if self.get_sync(id) is None:
            raise NotFoundError(f"Sync config with id '{id}' not found.")
        return self.get_syncs().pop(id)

    def check_sync_is_disabled(self, id: str) -> bool:
        """
        Return True if the sync itself, its source, or its pusher is disabled.

        Raises:
            NotFoundError: if the sync or either of its dependencies is missing.
        """
        config = self.get_sync(id)
        if not config:
            raise NotFoundError(f"Sync with id '{id}' not found.")

        if config.disabled:
            return True

        source_config = self.get_source(config.source)
        if not source_config:
            raise NotFoundError(f"Dependency source '{config.source}' not found for sync '{id}'.")

        pusher_config = self.get_pusher(config.pusher)
        if not pusher_config:
            raise NotFoundError(f"Dependency pusher '{config.pusher}' not found for sync '{id}'.")

        return source_config.disabled or pusher_config.disabled
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
class LogEntry(BaseModel):
    """Structured model for a single log record."""
    timestamp: datetime = Field(..., alias="ts")
    level: str  # log level string (e.g. as emitted by the logger)
    component: str = Field(..., alias="source")  # originating component
    message: str = Field(..., alias="msg")
    line_number: int  # key field used for pagination
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
from enum import Enum, Flag, auto
|
|
2
|
+
from typing import Optional, Dict, Any
|
|
3
|
+
from pydantic import BaseModel, Field
|
|
4
|
+
|
|
5
|
+
class EventBusState(str, Enum):
    """Lifecycle state of an event bus.

    Inherits from str so members serialize as plain strings (e.g. in JSON).
    """
    IDLE = "IDLE"
    PRODUCING = "PRODUCING"
    ERROR = "ERROR"
|
|
9
|
+
|
|
10
|
+
class SyncState(Flag):
    """Bit-flag enumeration for the state of a sync instance.

    Members are combinable flags; STOPPED is the empty flag (value 0).
    Values are written out explicitly but match the original auto() sequence.
    """

    STOPPED = 0
    STARTING = 1
    SNAPSHOT_SYNC = 2
    MESSAGE_SYNC = 4
    RUNNING_CONF_OUTDATE = 8
    STOPPING = 16
    ERROR = 32
|
|
19
|
+
|
|
20
|
+
class EventBusInstance(BaseModel):
    """Runtime snapshot of a single event bus instance."""
    id: str  # unique bus identifier
    source_name: str  # name of the associated source
    state: EventBusState
    info: str  # human-readable status message
    statistics: Dict[str, Any]  # runtime counters/metrics
|
|
26
|
+
|
|
27
|
+
class SyncInstanceDTO(BaseModel):
    """Transfer object describing the runtime state of one sync task."""
    id: str  # sync task identifier
    state: SyncState
    info: str  # human-readable status message
    statistics: Dict[str, Any]  # runtime counters/metrics
    bus_info: Optional[EventBusInstance] = None  # embedded bus snapshot, if attached
    bus_id: Optional[str] = None  # id of the attached event bus, if any
|
|
34
|
+
|
|
35
|
+
class AgentState(BaseModel):
    """Aggregate runtime state reported for an agent: its sync tasks and buses."""
    agent_id: str = Field(..., description="The unique identifier for the agent.")
    sync_tasks: Dict[str, SyncInstanceDTO] = Field(default_factory=dict, description="A dictionary of all sync tasks, keyed by their ID.")
    event_buses: Dict[str, EventBusInstance] = Field(default_factory=dict, description="A dictionary of all active event buses, keyed by their ID.")
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
from functools import wraps
|
|
4
|
+
from fustor_core.exceptions import DriverError
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(__name__)
|
|
7
|
+
|
|
8
|
+
def retry(max_retries_attr: str, delay_sec_attr: str, exceptions: tuple = (DriverError,)):
    """Decorator factory that retries a failing async method.

    The retry policy is resolved at call time from the instance's config:
    `getattr(self.config, max_retries_attr)` and
    `getattr(self.config, delay_sec_attr)` — so this only applies to async
    methods of objects exposing a `config` attribute with those fields.

    NOTE(review): with this limit check (`retries >= max_retries` after a
    failure), the wrapped function is invoked at most `max_retries` times in
    total, i.e. the first call counts toward the budget.

    Args:
        max_retries_attr: Config attribute name holding the max retry count.
        delay_sec_attr: Config attribute name holding the delay between retries.
        exceptions: Exception types that trigger a retry; anything else
            (including CancelledError) propagates immediately.
    """
    def decorator(func):
        @wraps(func)
        async def wrapper(self, *args, **kwargs):
            # Resolve the retry policy lazily from the instance's config.
            max_retries = getattr(self.config, max_retries_attr)
            delay_sec = getattr(self.config, delay_sec_attr)

            retries = 0
            while True:
                try:
                    return await func(self, *args, **kwargs)
                except asyncio.CancelledError:
                    # Cancellation during function execution: never swallow it.
                    logger.info(f"Function {func.__name__} was cancelled during execution.")
                    raise
                except exceptions as e:
                    retries += 1
                    if retries >= max_retries:
                        logger.error(f"Function {func.__name__} failed after {max_retries} retries.")
                        raise
                    logger.warning(f"Function {func.__name__} failed. Retrying in {delay_sec:.2f} seconds... ({retries}/{max_retries})")
                    try:
                        await asyncio.sleep(delay_sec)
                    except asyncio.CancelledError:
                        # Cancellation during the sleep/delay period: re-raise.
                        logger.info(f"Retry loop for {func.__name__} was cancelled during sleep.")
                        raise
        return wrapper
    return decorator
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: fustor-core
|
|
3
|
+
Version: 0.1.2.post3
|
|
4
|
+
Summary: Core components for Fustor services and plugins
|
|
5
|
+
Author-email: Huajin Wang <wanghuajin999@163.com>
|
|
6
|
+
Requires-Python: >=3.11
|
|
7
|
+
Requires-Dist: pydantic>=2.11.7
|
|
8
|
+
Provides-Extra: dev
|
|
9
|
+
Requires-Dist: pytest>=8.0.0; extra == "dev"
|
|
10
|
+
Requires-Dist: ruff>=0.1.0; extra == "dev"
|
|
11
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
12
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == "dev"
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
src/fustor_core/__init__.py
|
|
4
|
+
src/fustor_core/drivers.py
|
|
5
|
+
src/fustor_core/exceptions.py
|
|
6
|
+
src/fustor_core.egg-info/PKG-INFO
|
|
7
|
+
src/fustor_core.egg-info/SOURCES.txt
|
|
8
|
+
src/fustor_core.egg-info/dependency_links.txt
|
|
9
|
+
src/fustor_core.egg-info/requires.txt
|
|
10
|
+
src/fustor_core.egg-info/top_level.txt
|
|
11
|
+
src/fustor_core/models/__init__.py
|
|
12
|
+
src/fustor_core/models/config.py
|
|
13
|
+
src/fustor_core/models/log.py
|
|
14
|
+
src/fustor_core/models/states.py
|
|
15
|
+
src/fustor_core/utils/retry.py
|
|
16
|
+
tests/models/test_config.py
|
|
17
|
+
tests/models/test_event.py
|
|
18
|
+
tests/models/test_states.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
fustor_core
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from fustor_core.models.config import (
|
|
3
|
+
PasswdCredential, ApiKeyCredential, FieldMapping,
|
|
4
|
+
SourceConfig, PusherConfig, SyncConfig,
|
|
5
|
+
AppConfig, ConfigError, NotFoundError
|
|
6
|
+
)
|
|
7
|
+
|
|
8
|
+
def test_passwd_credential_hash_and_eq():
    """Equal credentials compare equal and share a hash; a different user does not."""
    same_a = PasswdCredential(user="testuser", passwd="testpass")
    same_b = PasswdCredential(user="testuser", passwd="testpass")
    other = PasswdCredential(user="anotheruser", passwd="testpass")

    assert same_a == same_b
    assert hash(same_a) == hash(same_b)
    assert same_a != other
    assert hash(same_a) != hash(other)
|
|
17
|
+
|
|
18
|
+
def test_api_key_credential_hash_and_eq():
|
|
19
|
+
cred1 = ApiKeyCredential(user="testuser", key="apikey123")
|
|
20
|
+
cred2 = ApiKeyCredential(user="testuser", key="apikey123")
|
|
21
|
+
cred3 = ApiKeyCredential(user="anotheruser", key="anotherkey")
|
|
22
|
+
|
|
23
|
+
assert cred1 == cred2
|
|
24
|
+
assert hash(cred1) == hash(cred2)
|
|
25
|
+
assert cred1 != cred3
|
|
26
|
+
assert hash(cred1) != hash(cred3)
|
|
27
|
+
|
|
28
|
+
def test_pusher_config_batch_size_validation():
|
|
29
|
+
with pytest.raises(ConfigError, match="batch_size must be positive"):
|
|
30
|
+
PusherConfig(driver="test", endpoint="http://localhost", credential=PasswdCredential(user="u"), batch_size=0)
|
|
31
|
+
with pytest.raises(ConfigError, match="batch_size must be positive"):
|
|
32
|
+
PusherConfig(driver="test", endpoint="http://localhost", credential=PasswdCredential(user="u"), batch_size=-1)
|
|
33
|
+
|
|
34
|
+
config = PusherConfig(driver="test", endpoint="http://localhost", credential=PasswdCredential(user="u"), batch_size=1)
|
|
35
|
+
assert config.batch_size == 1
|
|
36
|
+
|
|
37
|
+
def test_app_config_add_get_delete_source():
|
|
38
|
+
app_config = AppConfig()
|
|
39
|
+
source_config = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=False)
|
|
40
|
+
|
|
41
|
+
# Add source
|
|
42
|
+
app_config.add_source("my_source", source_config)
|
|
43
|
+
assert app_config.get_source("my_source") == source_config
|
|
44
|
+
|
|
45
|
+
# Add duplicate source
|
|
46
|
+
with pytest.raises(ConfigError, match="Source config with name 'my_source' already exists."):
|
|
47
|
+
app_config.add_source("my_source", source_config)
|
|
48
|
+
|
|
49
|
+
# Delete source
|
|
50
|
+
deleted_source = app_config.delete_source("my_source")
|
|
51
|
+
assert deleted_source == source_config
|
|
52
|
+
assert app_config.get_source("my_source") is None
|
|
53
|
+
|
|
54
|
+
# Delete non-existent source
|
|
55
|
+
with pytest.raises(NotFoundError, match="Source config with id 'non_existent' not found."):
|
|
56
|
+
app_config.delete_source("non_existent")
|
|
57
|
+
|
|
58
|
+
def test_app_config_add_get_delete_pusher():
|
|
59
|
+
app_config = AppConfig()
|
|
60
|
+
pusher_config = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=False)
|
|
61
|
+
|
|
62
|
+
# Add pusher
|
|
63
|
+
app_config.add_pusher("my_pusher", pusher_config)
|
|
64
|
+
assert app_config.get_pusher("my_pusher") == pusher_config
|
|
65
|
+
|
|
66
|
+
# Add duplicate pusher
|
|
67
|
+
with pytest.raises(ConfigError, match="Pusher config with name 'my_pusher' already exists."):
|
|
68
|
+
app_config.add_pusher("my_pusher", pusher_config)
|
|
69
|
+
|
|
70
|
+
# Delete pusher
|
|
71
|
+
deleted_pusher = app_config.delete_pusher("my_pusher")
|
|
72
|
+
assert deleted_pusher == pusher_config
|
|
73
|
+
assert app_config.get_pusher("my_pusher") is None
|
|
74
|
+
|
|
75
|
+
# Delete non-existent pusher
|
|
76
|
+
with pytest.raises(NotFoundError, match="Pusher config with id 'non_existent' not found."):
|
|
77
|
+
app_config.delete_pusher("non_existent")
|
|
78
|
+
|
|
79
|
+
def test_app_config_add_get_delete_sync():
|
|
80
|
+
app_config = AppConfig()
|
|
81
|
+
source_config = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=False)
|
|
82
|
+
pusher_config = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=False)
|
|
83
|
+
sync_config = SyncConfig(source="my_source", pusher="my_pusher", disabled=False)
|
|
84
|
+
|
|
85
|
+
# Add sync without dependencies
|
|
86
|
+
with pytest.raises(NotFoundError, match="Dependency source 'my_source' not found."):
|
|
87
|
+
app_config.add_sync("my_sync", sync_config)
|
|
88
|
+
|
|
89
|
+
app_config.add_source("my_source", source_config)
|
|
90
|
+
with pytest.raises(NotFoundError, match="Dependency pusher 'my_pusher' not found."):
|
|
91
|
+
app_config.add_sync("my_sync", sync_config)
|
|
92
|
+
|
|
93
|
+
app_config.add_pusher("my_pusher", pusher_config)
|
|
94
|
+
app_config.add_sync("my_sync", sync_config)
|
|
95
|
+
assert app_config.get_sync("my_sync") == sync_config
|
|
96
|
+
|
|
97
|
+
# Add duplicate sync
|
|
98
|
+
with pytest.raises(ConfigError, match="Sync config with id 'my_sync' already exists."):
|
|
99
|
+
app_config.add_sync("my_sync", sync_config)
|
|
100
|
+
|
|
101
|
+
# Delete sync
|
|
102
|
+
deleted_sync = app_config.delete_sync("my_sync")
|
|
103
|
+
assert deleted_sync == sync_config
|
|
104
|
+
assert app_config.get_sync("my_sync") is None
|
|
105
|
+
|
|
106
|
+
# Delete non-existent sync
|
|
107
|
+
with pytest.raises(NotFoundError, match="Sync config with id 'non_existent' not found."):
|
|
108
|
+
app_config.delete_sync("non_existent")
|
|
109
|
+
|
|
110
|
+
def test_app_config_delete_source_with_dependent_syncs():
|
|
111
|
+
app_config = AppConfig()
|
|
112
|
+
source_config = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=False)
|
|
113
|
+
pusher_config = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=False)
|
|
114
|
+
sync_config1 = SyncConfig(source="my_source", pusher="my_pusher", disabled=False)
|
|
115
|
+
sync_config2 = SyncConfig(source="my_source", pusher="my_pusher", disabled=False) # Another sync using the same source
|
|
116
|
+
|
|
117
|
+
app_config.add_source("my_source", source_config)
|
|
118
|
+
app_config.add_pusher("my_pusher", pusher_config)
|
|
119
|
+
app_config.add_sync("sync1", sync_config1)
|
|
120
|
+
app_config.add_sync("sync2", sync_config2)
|
|
121
|
+
|
|
122
|
+
assert app_config.get_sync("sync1") is not None
|
|
123
|
+
assert app_config.get_sync("sync2") is not None
|
|
124
|
+
|
|
125
|
+
app_config.delete_source("my_source")
|
|
126
|
+
|
|
127
|
+
assert app_config.get_source("my_source") is None
|
|
128
|
+
assert app_config.get_sync("sync1") is None
|
|
129
|
+
assert app_config.get_sync("sync2") is None
|
|
130
|
+
|
|
131
|
+
def test_app_config_delete_pusher_with_dependent_syncs():
|
|
132
|
+
app_config = AppConfig()
|
|
133
|
+
source_config = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=False)
|
|
134
|
+
pusher_config = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=False)
|
|
135
|
+
sync_config1 = SyncConfig(source="my_source", pusher="my_pusher", disabled=False)
|
|
136
|
+
sync_config2 = SyncConfig(source="my_source", pusher="my_pusher", disabled=False) # Another sync using the same pusher
|
|
137
|
+
|
|
138
|
+
app_config.add_source("my_source", source_config)
|
|
139
|
+
app_config.add_pusher("my_pusher", pusher_config)
|
|
140
|
+
app_config.add_sync("sync1", sync_config1)
|
|
141
|
+
app_config.add_sync("sync2", sync_config2)
|
|
142
|
+
|
|
143
|
+
assert app_config.get_sync("sync1") is not None
|
|
144
|
+
assert app_config.get_sync("sync2") is not None
|
|
145
|
+
|
|
146
|
+
app_config.delete_pusher("my_pusher")
|
|
147
|
+
|
|
148
|
+
assert app_config.get_pusher("my_pusher") is None
|
|
149
|
+
assert app_config.get_sync("sync1") is None
|
|
150
|
+
assert app_config.get_sync("sync2") is None
|
|
151
|
+
|
|
152
|
+
def test_app_config_check_sync_is_disabled():
|
|
153
|
+
app_config = AppConfig()
|
|
154
|
+
source_config_enabled = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=False)
|
|
155
|
+
source_config_disabled = SourceConfig(driver="mysql", uri="mysql://host", credential=PasswdCredential(user="u"), disabled=True)
|
|
156
|
+
pusher_config_enabled = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=False)
|
|
157
|
+
pusher_config_disabled = PusherConfig(driver="http", endpoint="http://localhost", credential=PasswdCredential(user="u"), disabled=True)
|
|
158
|
+
|
|
159
|
+
app_config.add_source("source_e", source_config_enabled)
|
|
160
|
+
app_config.add_source("source_d", source_config_disabled)
|
|
161
|
+
app_config.add_pusher("pusher_e", pusher_config_enabled)
|
|
162
|
+
app_config.add_pusher("pusher_d", pusher_config_disabled)
|
|
163
|
+
|
|
164
|
+
# Sync itself disabled
|
|
165
|
+
sync_disabled = SyncConfig(source="source_e", pusher="pusher_e", disabled=True)
|
|
166
|
+
app_config.add_sync("sync_d", sync_disabled)
|
|
167
|
+
assert app_config.check_sync_is_disabled("sync_d") is True
|
|
168
|
+
|
|
169
|
+
# Source disabled
|
|
170
|
+
sync_source_disabled = SyncConfig(source="source_d", pusher="pusher_e", disabled=False)
|
|
171
|
+
app_config.add_sync("sync_source_d", sync_source_disabled)
|
|
172
|
+
assert app_config.check_sync_is_disabled("sync_source_d") is True
|
|
173
|
+
|
|
174
|
+
# Pusher disabled
|
|
175
|
+
sync_pusher_disabled = SyncConfig(source="source_e", pusher="pusher_d", disabled=False)
|
|
176
|
+
app_config.add_sync("sync_pusher_d", sync_pusher_disabled)
|
|
177
|
+
assert app_config.check_sync_is_disabled("sync_pusher_d") is True
|
|
178
|
+
|
|
179
|
+
# All enabled
|
|
180
|
+
sync_enabled = SyncConfig(source="source_e", pusher="pusher_e", disabled=False)
|
|
181
|
+
app_config.add_sync("sync_e", sync_enabled)
|
|
182
|
+
assert app_config.check_sync_is_disabled("sync_e") is False
|
|
183
|
+
|
|
184
|
+
# Non-existent sync
|
|
185
|
+
with pytest.raises(NotFoundError, match="Sync with id 'non_existent' not found."):
|
|
186
|
+
app_config.check_sync_is_disabled("non_existent")
|
|
187
|
+
|
|
188
|
+
# Missing source dependency
|
|
189
|
+
sync_missing_source = SyncConfig(source="non_existent_source", pusher="pusher_e", disabled=False)
|
|
190
|
+
with pytest.raises(NotFoundError, match="Dependency source 'non_existent_source' not found."):
|
|
191
|
+
app_config.add_sync("sync_missing_source", sync_missing_source)
|
|
192
|
+
|
|
193
|
+
# Missing pusher dependency
|
|
194
|
+
sync_missing_pusher = SyncConfig(source="source_e", pusher="non_existent_pusher", disabled=False)
|
|
195
|
+
with pytest.raises(NotFoundError, match="Dependency pusher 'non_existent_pusher' not found."):
|
|
196
|
+
app_config.add_sync("sync_missing_pusher", sync_missing_pusher)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from fustor_event_model.models import EventType, EventBase, InsertEvent, UpdateEvent, DeleteEvent
|
|
2
|
+
|
|
3
|
+
class TestEventModels:
|
|
4
|
+
def test_event_base(self):
|
|
5
|
+
event = EventBase(fields=["id", "name"], rows=[(1, "test")], event_type=EventType.INSERT, index=123, event_schema="s", table="t")
|
|
6
|
+
assert event.event_type == EventType.INSERT
|
|
7
|
+
assert event.fields == ["id", "name"]
|
|
8
|
+
assert event.rows == [(1, "test")]
|
|
9
|
+
assert event.index == 123
|
|
10
|
+
|
|
11
|
+
def test_insert_event(self):
|
|
12
|
+
event = InsertEvent(event_schema="public", table="users", rows=[{"id": 1, "name": "test"}], fields=["id", "name"])
|
|
13
|
+
assert event.event_type == EventType.INSERT
|
|
14
|
+
assert event.event_schema == "public"
|
|
15
|
+
assert event.table == "users"
|
|
16
|
+
assert event.fields == ["id", "name"]
|
|
17
|
+
|
|
18
|
+
def test_update_event(self):
|
|
19
|
+
event = UpdateEvent(event_schema="public", table="users", rows=[{"id": 1, "name": "test_updated"}], fields=["id", "name"])
|
|
20
|
+
assert event.event_type == EventType.UPDATE
|
|
21
|
+
assert event.table == "users"
|
|
22
|
+
|
|
23
|
+
def test_delete_event(self):
|
|
24
|
+
event = DeleteEvent(event_schema="public", table="users", rows=[{"id": 1}], fields=["id"])
|
|
25
|
+
assert event.event_type == EventType.DELETE
|
|
26
|
+
assert event.table == "users"
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from fustor_core.models.states import EventBusState, SyncState, EventBusInstance, SyncInstanceDTO
|
|
3
|
+
|
|
4
|
+
def test_event_bus_state_enum():
|
|
5
|
+
assert EventBusState.IDLE.name == "IDLE"
|
|
6
|
+
assert EventBusState.PRODUCING.name == "PRODUCING"
|
|
7
|
+
assert EventBusState.ERROR.name == "ERROR"
|
|
8
|
+
|
|
9
|
+
def test_sync_state_enum():
|
|
10
|
+
assert SyncState.STOPPED.name == "STOPPED"
|
|
11
|
+
# --- REFACTORED: Test for new two-phase states instead of obsolete RUNNING state ---
|
|
12
|
+
assert SyncState.SNAPSHOT_SYNC.name == "SNAPSHOT_SYNC"
|
|
13
|
+
assert SyncState.MESSAGE_SYNC.name == "MESSAGE_SYNC"
|
|
14
|
+
# --- END REFACTOR ---
|
|
15
|
+
assert SyncState.RUNNING_CONF_OUTDATE.name == "RUNNING_CONF_OUTDATE"
|
|
16
|
+
assert SyncState.STOPPING.name == "STOPPING"
|
|
17
|
+
assert SyncState.ERROR.name == "ERROR"
|
|
18
|
+
|
|
19
|
+
def test_event_bus_instance_dto():
|
|
20
|
+
dto = EventBusInstance(
|
|
21
|
+
id="bus-123",
|
|
22
|
+
source_name="my-source",
|
|
23
|
+
state=EventBusState.PRODUCING,
|
|
24
|
+
info="Bus is actively producing events.",
|
|
25
|
+
statistics={"events_produced": 100, "consumers": 2}
|
|
26
|
+
)
|
|
27
|
+
assert dto.id == "bus-123"
|
|
28
|
+
assert dto.source_name == "my-source"
|
|
29
|
+
assert dto.state == EventBusState.PRODUCING
|
|
30
|
+
assert dto.info == "Bus is actively producing events."
|
|
31
|
+
assert dto.statistics == {"events_produced": 100, "consumers": 2}
|
|
32
|
+
|
|
33
|
+
def test_sync_instance_dto():
|
|
34
|
+
bus_dto = EventBusInstance(
|
|
35
|
+
id="bus-456",
|
|
36
|
+
source_name="another-source",
|
|
37
|
+
state=EventBusState.IDLE,
|
|
38
|
+
info="Bus is idle.",
|
|
39
|
+
statistics={}
|
|
40
|
+
)
|
|
41
|
+
# --- REFACTORED: Use one of the new valid states for the test ---
|
|
42
|
+
dto = SyncInstanceDTO(
|
|
43
|
+
id="sync-abc",
|
|
44
|
+
state=SyncState.MESSAGE_SYNC,
|
|
45
|
+
info="Sync task is running normally.",
|
|
46
|
+
bus_info=bus_dto,
|
|
47
|
+
bus_id="bus-456",
|
|
48
|
+
statistics={"events_pushed": 50, "last_event_id": "xyz"}
|
|
49
|
+
)
|
|
50
|
+
# --- END REFACTOR ---
|
|
51
|
+
assert dto.id == "sync-abc"
|
|
52
|
+
assert dto.state == SyncState.MESSAGE_SYNC
|
|
53
|
+
assert dto.info == "Sync task is running normally."
|
|
54
|
+
assert dto.bus_info == bus_dto
|
|
55
|
+
assert dto.bus_id == "bus-456"
|
|
56
|
+
assert dto.statistics == {"events_pushed": 50, "last_event_id": "xyz"}
|
|
57
|
+
|
|
58
|
+
def test_sync_instance_dto_no_bus_info():
|
|
59
|
+
dto = SyncInstanceDTO(
|
|
60
|
+
id="sync-def",
|
|
61
|
+
state=SyncState.STOPPED,
|
|
62
|
+
info="Sync task is stopped.",
|
|
63
|
+
bus_info=None,
|
|
64
|
+
bus_id=None,
|
|
65
|
+
statistics={}
|
|
66
|
+
)
|
|
67
|
+
assert dto.id == "sync-def"
|
|
68
|
+
assert dto.state == SyncState.STOPPED
|
|
69
|
+
assert dto.info == "Sync task is stopped."
|
|
70
|
+
assert dto.bus_info is None
|
|
71
|
+
assert dto.bus_id is None
|
|
72
|
+
assert dto.statistics == {}
|