taskdog-client 0.18.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- taskdog_client/__init__.py +31 -0
- taskdog_client/analytics_client.py +97 -0
- taskdog_client/audit_client.py +120 -0
- taskdog_client/base_client.py +205 -0
- taskdog_client/bulk_client.py +90 -0
- taskdog_client/converters/__init__.py +31 -0
- taskdog_client/converters/datetime_utils.py +160 -0
- taskdog_client/converters/exceptions.py +33 -0
- taskdog_client/converters/gantt_converters.py +158 -0
- taskdog_client/converters/optimization_converters.py +139 -0
- taskdog_client/converters/statistics_converters.py +215 -0
- taskdog_client/converters/tag_converters.py +25 -0
- taskdog_client/converters/task_converters.py +203 -0
- taskdog_client/lifecycle_client.py +148 -0
- taskdog_client/notes_client.py +61 -0
- taskdog_client/py.typed +0 -0
- taskdog_client/query_client.py +211 -0
- taskdog_client/relationship_client.py +102 -0
- taskdog_client/task_client.py +217 -0
- taskdog_client/taskdog_api_client.py +443 -0
- taskdog_client/websocket/__init__.py +8 -0
- taskdog_client/websocket/websocket_client.py +203 -0
- taskdog_client-0.18.1.dist-info/METADATA +160 -0
- taskdog_client-0.18.1.dist-info/RECORD +26 -0
- taskdog_client-0.18.1.dist-info/WHEEL +5 -0
- taskdog_client-0.18.1.dist-info/top_level.txt +1 -0
|
"""HTTP and WebSocket client for Taskdog server.

This package provides type-safe HTTP and WebSocket clients for communicating with
the Taskdog API server. It handles authentication, error mapping,
and response conversion to domain DTOs.
"""

from taskdog_client.analytics_client import AnalyticsClient
from taskdog_client.audit_client import AuditClient
from taskdog_client.base_client import BaseApiClient

# NOTE(review): bulk_client.py ships in this package but BulkClient was not
# exported; re-exporting it here keeps the public API consistent with the
# other per-domain clients. Confirm it was not deliberately kept internal.
from taskdog_client.bulk_client import BulkClient
from taskdog_client.lifecycle_client import LifecycleClient
from taskdog_client.notes_client import NotesClient
from taskdog_client.query_client import QueryClient
from taskdog_client.relationship_client import RelationshipClient
from taskdog_client.task_client import TaskClient
from taskdog_client.taskdog_api_client import TaskdogApiClient
from taskdog_client.websocket import ConnectionState, WebSocketClient

__all__ = [
    "AnalyticsClient",
    "AuditClient",
    "BaseApiClient",
    "BulkClient",
    "ConnectionState",
    "LifecycleClient",
    "NotesClient",
    "QueryClient",
    "RelationshipClient",
    "TaskClient",
    "TaskdogApiClient",
    "WebSocketClient",
]
"""Analytics and optimization client."""

from datetime import datetime

from taskdog_client.base_client import BaseApiClient
from taskdog_client.converters import (
    convert_to_optimization_output,
    convert_to_statistics_output,
)
from taskdog_core.application.dto.optimization_output import OptimizationOutput
from taskdog_core.application.dto.statistics_output import StatisticsOutput


class AnalyticsClient:
    """Client for analytics and schedule optimization.

    Operations:
    - Calculate statistics
    - Optimize schedules
    - Get algorithm metadata
    """

    def __init__(self, base_client: BaseApiClient):
        """Initialize analytics client.

        Args:
            base_client: Base API client for HTTP operations
        """
        self._base = base_client

    def calculate_statistics(self, period: str = "all") -> StatisticsOutput:
        """Calculate task statistics.

        Args:
            period: Time period (all, 7d, 30d)

        Returns:
            StatisticsOutput with statistics data

        Raises:
            TaskValidationError: If period is invalid
        """
        # Pass the period through `params` so httpx URL-encodes the value,
        # instead of interpolating it raw into the query string (which would
        # break for values containing reserved characters). This also matches
        # how AuditClient builds its query strings.
        data = self._base._request_json(
            "get", "/api/v1/statistics", params={"period": period}
        )
        return convert_to_statistics_output(data)

    def optimize_schedule(
        self,
        algorithm: str,
        start_date: datetime | None,
        max_hours_per_day: float,
        force_override: bool = True,
        task_ids: list[int] | None = None,
        include_all_days: bool = False,
    ) -> OptimizationOutput:
        """Optimize task schedules.

        Args:
            algorithm: Algorithm name (required)
            start_date: Optimization start date (None = server current time)
            max_hours_per_day: Maximum hours per day (required)
            force_override: Force override existing schedules
            task_ids: Specific task IDs to optimize (None means all schedulable tasks)
            include_all_days: If True, schedule tasks on weekends and holidays too (default: False)

        Returns:
            OptimizationOutput with optimization results

        Raises:
            TaskValidationError: If validation fails
            TaskNotFoundException: If any specified task_id does not exist
            NoSchedulableTasksError: If no tasks can be scheduled
        """
        payload: dict[str, str | float | bool | list[int] | None] = {
            "algorithm": algorithm,
            # Serialize to ISO-8601; None lets the server pick "now".
            "start_date": start_date.isoformat() if start_date else None,
            "max_hours_per_day": max_hours_per_day,
            "force_override": force_override,
            "include_all_days": include_all_days,
        }

        # Only include task_ids if it's not None
        if task_ids is not None:
            payload["task_ids"] = task_ids

        data = self._base._request_json("post", "/api/v1/optimize", json=payload)
        return convert_to_optimization_output(data)

    def get_algorithm_metadata(self) -> list[tuple[str, str, str]]:
        """Get available optimization algorithms.

        Returns:
            List of (name, display_name, description) tuples
        """
        data = self._base._request_json("get", "/api/v1/algorithms")
        return [
            (algo["name"], algo["display_name"], algo["description"]) for algo in data
        ]
"""Audit log client."""

from datetime import datetime
from typing import Any

from taskdog_client.base_client import BaseApiClient
from taskdog_core.application.dto.audit_log_dto import (
    AuditLogListOutput,
    AuditLogOutput,
)


class AuditClient:
    """Client for audit log operations.

    Operations:
    - List audit logs with filtering
    - Get single audit log by ID
    """

    def __init__(self, base_client: BaseApiClient):
        """Initialize audit client.

        Args:
            base_client: Base API client for HTTP operations
        """
        self._base = base_client

    def list_audit_logs(
        self,
        client_filter: str | None = None,
        operation: str | None = None,
        resource_type: str | None = None,
        resource_id: int | None = None,
        success: bool | None = None,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
        limit: int = 100,
        offset: int = 0,
    ) -> AuditLogListOutput:
        """List audit logs with optional filtering.

        Args:
            client_filter: Filter by client name
            operation: Filter by operation type
            resource_type: Filter by resource type
            resource_id: Filter by resource ID
            success: Filter by success status
            start_date: Filter logs after this datetime
            end_date: Filter logs before this datetime
            limit: Maximum number of logs to return
            offset: Number of logs to skip for pagination

        Returns:
            AuditLogListOutput with logs and pagination info
        """
        # Pagination is always present; every other parameter is only sent
        # when the caller actually supplied it.
        query: dict[str, str | int] = {"limit": limit, "offset": offset}

        plain_filters: dict[str, str | int | None] = {
            "client": client_filter,
            "operation": operation,
            "resource_type": resource_type,
            "resource_id": resource_id,
        }
        query.update({key: val for key, val in plain_filters.items() if val is not None})

        # These need serialization before they can travel as query params.
        if success is not None:
            query["success"] = str(success).lower()
        if start_date is not None:
            query["start_date"] = start_date.isoformat()
        if end_date is not None:
            query["end_date"] = end_date.isoformat()

        payload = self._base._request_json("get", "/api/v1/audit-logs", params=query)
        return self._convert_to_list_output(payload)

    def get_audit_log(self, log_id: int) -> AuditLogOutput:
        """Get a single audit log entry by ID.

        Args:
            log_id: ID of the audit log entry to retrieve

        Returns:
            AuditLogOutput with the audit log details
        """
        payload = self._base._request_json("get", f"/api/v1/audit-logs/{log_id}")
        return self._convert_to_output(payload)

    def _convert_to_output(self, data: dict[str, Any]) -> AuditLogOutput:
        """Convert API response to AuditLogOutput DTO."""
        # Best-effort timestamp parsing: a missing key raises TypeError via
        # fromisoformat(None), a malformed string raises ValueError; either
        # way we fall back to the current time rather than failing the call.
        try:
            when = datetime.fromisoformat(data.get("timestamp"))
        except (TypeError, ValueError):
            when = datetime.now()

        return AuditLogOutput(
            id=data["id"],
            timestamp=when,
            client_name=data.get("client_name"),
            operation=data["operation"],
            resource_type=data["resource_type"],
            resource_id=data.get("resource_id"),
            resource_name=data.get("resource_name"),
            old_values=data.get("old_values"),
            new_values=data.get("new_values"),
            success=data["success"],
            error_message=data.get("error_message"),
        )

    def _convert_to_list_output(self, data: dict[str, Any]) -> AuditLogListOutput:
        """Convert API response to AuditLogListOutput DTO."""
        return AuditLogListOutput(
            logs=[self._convert_to_output(entry) for entry in data["logs"]],
            total_count=data["total_count"],
            limit=data["limit"],
            offset=data["offset"],
        )
@@ -0,0 +1,205 @@
|
|
|
1
|
+
"""Base HTTP client infrastructure for Taskdog API."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import contextlib
|
|
6
|
+
from typing import TYPE_CHECKING, Any
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
|
|
11
|
+
from taskdog_core.application.dto.task_operation_output import TaskOperationOutput
|
|
12
|
+
|
|
13
|
+
import httpx # type: ignore[import-not-found]
|
|
14
|
+
|
|
15
|
+
from taskdog_core.domain.exceptions.task_exceptions import (
|
|
16
|
+
AuthenticationError,
|
|
17
|
+
ServerConnectionError,
|
|
18
|
+
ServerError,
|
|
19
|
+
TaskNotFoundException,
|
|
20
|
+
TaskValidationError,
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class BaseApiClient:
|
|
25
|
+
"""Base HTTP client with error handling and lifecycle management.
|
|
26
|
+
|
|
27
|
+
Provides:
|
|
28
|
+
- HTTP client initialization and lifecycle
|
|
29
|
+
- Context manager support
|
|
30
|
+
- Error mapping to domain exceptions
|
|
31
|
+
- Safe request execution with connection handling
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
def __init__(
|
|
35
|
+
self,
|
|
36
|
+
base_url: str = "http://127.0.0.1:8000",
|
|
37
|
+
timeout: float = 30.0,
|
|
38
|
+
api_key: str | None = None,
|
|
39
|
+
):
|
|
40
|
+
"""Initialize base API client.
|
|
41
|
+
|
|
42
|
+
Args:
|
|
43
|
+
base_url: Base URL of the API server
|
|
44
|
+
timeout: Request timeout in seconds
|
|
45
|
+
api_key: API key for authentication (sent as X-Api-Key header)
|
|
46
|
+
"""
|
|
47
|
+
self.base_url = base_url.rstrip("/")
|
|
48
|
+
self.client = httpx.Client(base_url=self.base_url, timeout=timeout)
|
|
49
|
+
self.client_id: str | None = None # Set by WebSocket connection
|
|
50
|
+
self.api_key = api_key
|
|
51
|
+
|
|
52
|
+
def close(self) -> None:
|
|
53
|
+
"""Close the HTTP client."""
|
|
54
|
+
self.client.close()
|
|
55
|
+
|
|
56
|
+
def __enter__(self) -> BaseApiClient:
|
|
57
|
+
"""Context manager entry."""
|
|
58
|
+
return self
|
|
59
|
+
|
|
60
|
+
def __exit__(self, *args: Any) -> None:
|
|
61
|
+
"""Context manager exit."""
|
|
62
|
+
self.close()
|
|
63
|
+
|
|
64
|
+
def _extract_validation_error_detail(self, response: httpx.Response) -> str:
|
|
65
|
+
"""Extract validation error detail from response.
|
|
66
|
+
|
|
67
|
+
Handles both simple {"detail": "message"} and FastAPI's Pydantic format:
|
|
68
|
+
{"detail": [{"loc": [...], "msg": "...", "type": "..."}]}
|
|
69
|
+
|
|
70
|
+
Args:
|
|
71
|
+
response: HTTP response with validation error
|
|
72
|
+
|
|
73
|
+
Returns:
|
|
74
|
+
Human-readable error message
|
|
75
|
+
"""
|
|
76
|
+
try:
|
|
77
|
+
data = response.json()
|
|
78
|
+
detail = data.get("detail", "Validation error")
|
|
79
|
+
|
|
80
|
+
# Handle FastAPI's Pydantic validation error format (list of errors)
|
|
81
|
+
if isinstance(detail, list) and len(detail) > 0:
|
|
82
|
+
messages = []
|
|
83
|
+
for error in detail:
|
|
84
|
+
if isinstance(error, dict):
|
|
85
|
+
msg = error.get("msg", "")
|
|
86
|
+
loc = error.get("loc", [])
|
|
87
|
+
# Format: "field: message"
|
|
88
|
+
field = loc[-1] if loc else "field"
|
|
89
|
+
messages.append(f"{field}: {msg}")
|
|
90
|
+
else:
|
|
91
|
+
messages.append(str(error))
|
|
92
|
+
return "; ".join(messages)
|
|
93
|
+
|
|
94
|
+
return str(detail)
|
|
95
|
+
except (KeyError, TypeError, ValueError):
|
|
96
|
+
return "Validation error"
|
|
97
|
+
|
|
98
|
+
def _handle_error(self, response: httpx.Response) -> None:
|
|
99
|
+
"""Handle HTTP error responses.
|
|
100
|
+
|
|
101
|
+
Args:
|
|
102
|
+
response: HTTP response
|
|
103
|
+
|
|
104
|
+
Raises:
|
|
105
|
+
TaskNotFoundException: If status is 404
|
|
106
|
+
TaskValidationError: If status is 400 or 422
|
|
107
|
+
Exception: For other errors
|
|
108
|
+
"""
|
|
109
|
+
if response.status_code == 404:
|
|
110
|
+
detail = response.json().get("detail", "Task not found")
|
|
111
|
+
raise TaskNotFoundException(detail)
|
|
112
|
+
if response.status_code in (400, 422):
|
|
113
|
+
detail = self._extract_validation_error_detail(response)
|
|
114
|
+
raise TaskValidationError(detail)
|
|
115
|
+
if response.status_code == 401:
|
|
116
|
+
raise AuthenticationError("Authentication failed. Check your API key.")
|
|
117
|
+
if response.status_code >= 500:
|
|
118
|
+
detail = "Server error occurred"
|
|
119
|
+
with contextlib.suppress(Exception):
|
|
120
|
+
detail = response.json().get("detail", detail)
|
|
121
|
+
raise ServerError(response.status_code, detail)
|
|
122
|
+
response.raise_for_status()
|
|
123
|
+
|
|
124
|
+
def lifecycle_operation(self, task_id: int, operation: str) -> TaskOperationOutput:
|
|
125
|
+
"""Execute a lifecycle operation on a task.
|
|
126
|
+
|
|
127
|
+
Generic helper for lifecycle operations (start, complete, pause, etc.)
|
|
128
|
+
that follow the same pattern: POST to /api/v1/tasks/{id}/{operation}.
|
|
129
|
+
|
|
130
|
+
Args:
|
|
131
|
+
task_id: Task ID
|
|
132
|
+
operation: Operation name (e.g., "start", "complete", "archive")
|
|
133
|
+
|
|
134
|
+
Returns:
|
|
135
|
+
TaskOperationOutput with updated task data
|
|
136
|
+
|
|
137
|
+
Raises:
|
|
138
|
+
TaskNotFoundException: If task not found
|
|
139
|
+
TaskValidationError: If validation fails
|
|
140
|
+
"""
|
|
141
|
+
from taskdog_client.converters import convert_to_task_operation_output
|
|
142
|
+
|
|
143
|
+
data = self._request_json("post", f"/api/v1/tasks/{task_id}/{operation}")
|
|
144
|
+
return convert_to_task_operation_output(data)
|
|
145
|
+
|
|
146
|
+
def _request_json(self, method: str, *args: Any, **kwargs: Any) -> Any:
|
|
147
|
+
"""Execute HTTP request and return JSON response, handling errors.
|
|
148
|
+
|
|
149
|
+
Combines _safe_request + error check + JSON parsing into one call.
|
|
150
|
+
|
|
151
|
+
Args:
|
|
152
|
+
method: HTTP method name ('get', 'post', 'patch', 'delete', 'put')
|
|
153
|
+
*args: Positional arguments for the request
|
|
154
|
+
**kwargs: Keyword arguments for the request
|
|
155
|
+
|
|
156
|
+
Returns:
|
|
157
|
+
Parsed JSON response (dict, list, or primitive)
|
|
158
|
+
|
|
159
|
+
Raises:
|
|
160
|
+
ServerConnectionError: If connection to server fails
|
|
161
|
+
TaskNotFoundException: If status is 404
|
|
162
|
+
TaskValidationError: If status is 400 or 422
|
|
163
|
+
AuthenticationError: If status is 401
|
|
164
|
+
ServerError: If status >= 500
|
|
165
|
+
"""
|
|
166
|
+
response = self._safe_request(method, *args, **kwargs)
|
|
167
|
+
if not response.is_success:
|
|
168
|
+
self._handle_error(response)
|
|
169
|
+
return response.json()
|
|
170
|
+
|
|
171
|
+
def _safe_request(self, method: str, *args: Any, **kwargs: Any) -> httpx.Response:
|
|
172
|
+
"""Execute HTTP request with connection error handling.
|
|
173
|
+
|
|
174
|
+
Args:
|
|
175
|
+
method: HTTP method name ('get', 'post', 'patch', 'delete', 'put')
|
|
176
|
+
*args: Positional arguments for the request
|
|
177
|
+
**kwargs: Keyword arguments for the request
|
|
178
|
+
|
|
179
|
+
Returns:
|
|
180
|
+
HTTP response
|
|
181
|
+
|
|
182
|
+
Raises:
|
|
183
|
+
ServerConnectionError: If connection to server fails
|
|
184
|
+
TaskNotFoundException: If status is 404
|
|
185
|
+
TaskValidationError: If status is 400
|
|
186
|
+
Exception: For other errors
|
|
187
|
+
"""
|
|
188
|
+
try:
|
|
189
|
+
headers = kwargs.get("headers", {})
|
|
190
|
+
|
|
191
|
+
# Add X-Client-ID header if client_id is set
|
|
192
|
+
if self.client_id:
|
|
193
|
+
headers["X-Client-ID"] = self.client_id
|
|
194
|
+
|
|
195
|
+
# Add X-Api-Key header if api_key is set
|
|
196
|
+
if self.api_key:
|
|
197
|
+
headers["X-Api-Key"] = self.api_key
|
|
198
|
+
|
|
199
|
+
if headers:
|
|
200
|
+
kwargs["headers"] = headers
|
|
201
|
+
|
|
202
|
+
request_method: Callable[..., httpx.Response] = getattr(self.client, method)
|
|
203
|
+
return request_method(*args, **kwargs)
|
|
204
|
+
except (httpx.ConnectError, httpx.TimeoutException, httpx.RequestError) as e:
|
|
205
|
+
raise ServerConnectionError(self.base_url, e) from e
|
|
"""Bulk operations client for batch task processing."""

from typing import Any

from taskdog_client.base_client import BaseApiClient
from taskdog_client.converters import convert_to_task_operation_output
from taskdog_core.application.dto.bulk_operation_output import (
    BulkOperationOutput,
    BulkTaskResultOutput,
)


class BulkClient:
    """Client for bulk task operations.

    Sends multiple task IDs in a single request for batch processing.
    """

    def __init__(self, base_client: BaseApiClient):
        """Initialize bulk client.

        Args:
            base_client: Base API client for HTTP operations
        """
        self._base = base_client

    def _bulk_operation(
        self, task_ids: list[int], operation: str
    ) -> BulkOperationOutput:
        """Execute a bulk operation.

        Args:
            task_ids: List of task IDs to operate on
            operation: Operation name (e.g., "start", "complete", "archive")

        Returns:
            BulkOperationOutput with per-task results
        """
        data = self._base._request_json(
            "post",
            f"/api/v1/tasks/bulk/{operation}",
            json={"task_ids": task_ids},
        )
        return self._parse_bulk_response(data)

    def _parse_bulk_response(self, data: dict[str, Any]) -> BulkOperationOutput:
        """Parse bulk operation response JSON into DTO.

        Args:
            data: Raw JSON response body

        Returns:
            BulkOperationOutput with one BulkTaskResultOutput per task

        Raises:
            ValueError: If the response is missing the 'results' key
        """
        if "results" not in data:
            raise ValueError("Invalid bulk operation response: missing 'results' key")
        results: list[BulkTaskResultOutput] = []
        for item in data["results"]:
            # A task payload is only present for entries that succeeded.
            raw_task = item.get("task")
            task = (
                convert_to_task_operation_output(raw_task)
                if raw_task is not None
                else None
            )
            results.append(
                BulkTaskResultOutput(
                    task_id=item["task_id"],
                    success=item["success"],
                    task=task,
                    error=item.get("error"),
                )
            )
        return BulkOperationOutput(results=results)

    def bulk_start(self, task_ids: list[int]) -> BulkOperationOutput:
        """Start multiple tasks."""
        return self._bulk_operation(task_ids, "start")

    def bulk_complete(self, task_ids: list[int]) -> BulkOperationOutput:
        """Complete multiple tasks."""
        return self._bulk_operation(task_ids, "complete")

    def bulk_pause(self, task_ids: list[int]) -> BulkOperationOutput:
        """Pause multiple tasks."""
        return self._bulk_operation(task_ids, "pause")

    def bulk_cancel(self, task_ids: list[int]) -> BulkOperationOutput:
        """Cancel multiple tasks."""
        return self._bulk_operation(task_ids, "cancel")

    def bulk_reopen(self, task_ids: list[int]) -> BulkOperationOutput:
        """Reopen multiple tasks."""
        return self._bulk_operation(task_ids, "reopen")

    def bulk_archive(self, task_ids: list[int]) -> BulkOperationOutput:
        """Archive multiple tasks."""
        return self._bulk_operation(task_ids, "archive")

    def bulk_restore(self, task_ids: list[int]) -> BulkOperationOutput:
        """Restore multiple tasks."""
        return self._bulk_operation(task_ids, "restore")

    def bulk_delete(self, task_ids: list[int]) -> BulkOperationOutput:
        """Delete multiple tasks permanently."""
        return self._bulk_operation(task_ids, "delete")
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"""DTO converters for API responses.
|
|
2
|
+
|
|
3
|
+
Converts JSON responses from API to taskdog-core DTOs.
|
|
4
|
+
Single source of truth for all API-to-DTO transformations.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from .exceptions import ConversionError
|
|
8
|
+
from .gantt_converters import convert_to_gantt_output
|
|
9
|
+
from .optimization_converters import convert_to_optimization_output
|
|
10
|
+
from .statistics_converters import convert_to_statistics_output
|
|
11
|
+
from .tag_converters import convert_to_tag_statistics_output
|
|
12
|
+
from .task_converters import (
|
|
13
|
+
convert_to_get_task_by_id_output,
|
|
14
|
+
convert_to_get_task_detail_output,
|
|
15
|
+
convert_to_task_list_output,
|
|
16
|
+
convert_to_task_operation_output,
|
|
17
|
+
convert_to_update_task_output,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
__all__ = [
|
|
21
|
+
"ConversionError",
|
|
22
|
+
"convert_to_gantt_output",
|
|
23
|
+
"convert_to_get_task_by_id_output",
|
|
24
|
+
"convert_to_get_task_detail_output",
|
|
25
|
+
"convert_to_optimization_output",
|
|
26
|
+
"convert_to_statistics_output",
|
|
27
|
+
"convert_to_tag_statistics_output",
|
|
28
|
+
"convert_to_task_list_output",
|
|
29
|
+
"convert_to_task_operation_output",
|
|
30
|
+
"convert_to_update_task_output",
|
|
31
|
+
]
|