qalita 2.6.3__py3-none-any.whl → 2.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qalita/__main__.py +29 -19
- qalita/_frontend/.next/BUILD_ID +1 -1
- qalita/_frontend/.next/build-manifest.json +7 -7
- qalita/_frontend/.next/prerender-manifest.json +3 -3
- qalita/_frontend/.next/server/app/_global-error/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/_global-error/page.js +1 -1
- qalita/_frontend/.next/server/app/_global-error/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/_global-error/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/_global-error.html +2 -2
- qalita/_frontend/.next/server/app/_global-error.rsc +7 -7
- qalita/_frontend/.next/server/app/_global-error.segments/__PAGE__.segment.rsc +2 -2
- qalita/_frontend/.next/server/app/_global-error.segments/_full.segment.rsc +7 -7
- qalita/_frontend/.next/server/app/_global-error.segments/_head.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/_global-error.segments/_index.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -1
- qalita/_frontend/.next/server/app/_not-found/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/_not-found/page.js +1 -1
- qalita/_frontend/.next/server/app/_not-found/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/_not-found/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/_not-found.html +1 -1
- qalita/_frontend/.next/server/app/_not-found.rsc +14 -12
- qalita/_frontend/.next/server/app/_not-found.segments/_full.segment.rsc +14 -12
- qalita/_frontend/.next/server/app/_not-found.segments/_head.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/_not-found.segments/_index.segment.rsc +6 -4
- qalita/_frontend/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +2 -2
- qalita/_frontend/.next/server/app/_not-found.segments/_not-found.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/_not-found.segments/_tree.segment.rsc +3 -2
- qalita/_frontend/.next/server/app/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/page.js +1 -1
- qalita/_frontend/.next/server/app/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/sources/add/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/sources/add/page.js +1 -1
- qalita/_frontend/.next/server/app/sources/add/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/sources/add/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/sources/add.html +1 -1
- qalita/_frontend/.next/server/app/sources/add.rsc +18 -16
- qalita/_frontend/.next/server/app/sources/add.segments/_full.segment.rsc +18 -16
- qalita/_frontend/.next/server/app/sources/add.segments/_head.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/sources/add.segments/_index.segment.rsc +6 -4
- qalita/_frontend/.next/server/app/sources/add.segments/_tree.segment.rsc +3 -2
- qalita/_frontend/.next/server/app/sources/add.segments/sources/add/__PAGE__.segment.rsc +4 -4
- qalita/_frontend/.next/server/app/sources/add.segments/sources/add.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/sources/add.segments/sources.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/sources/edit/[id]/page.js +1 -1
- qalita/_frontend/.next/server/app/sources/edit/[id]/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/sources/edit/[id]/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/sources/page/build-manifest.json +5 -5
- qalita/_frontend/.next/server/app/sources/page.js +1 -1
- qalita/_frontend/.next/server/app/sources/page.js.nft.json +1 -1
- qalita/_frontend/.next/server/app/sources/page_client-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/app/sources.html +1 -1
- qalita/_frontend/.next/server/app/sources.rsc +18 -16
- qalita/_frontend/.next/server/app/sources.segments/_full.segment.rsc +18 -16
- qalita/_frontend/.next/server/app/sources.segments/_head.segment.rsc +3 -3
- qalita/_frontend/.next/server/app/sources.segments/_index.segment.rsc +6 -4
- qalita/_frontend/.next/server/app/sources.segments/_tree.segment.rsc +3 -2
- qalita/_frontend/.next/server/app/sources.segments/sources/__PAGE__.segment.rsc +4 -4
- qalita/_frontend/.next/server/app/sources.segments/sources.segment.rsc +3 -3
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__e868c9e1._.js +1 -1
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__ebaae723._.js +1 -1
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__17f2c9b6._.js +1 -1
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__1d5b5394._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__21824174._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/{[root-of-the-server]__b9356576._.js → [root-of-the-server]__336e4c46._.js} +2 -2
- qalita/_frontend/.next/server/chunks/ssr/{[root-of-the-server]__c507bbfe._.js → [root-of-the-server]__7876511a._.js} +2 -2
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__be91267c._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__d15765f1._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/{_3b4a232c._.js → _404f6e81._.js} +4 -4
- qalita/_frontend/.next/server/chunks/ssr/{_cd257a0c._.js → _6a67f6f0._.js} +4 -4
- qalita/_frontend/.next/server/chunks/ssr/_cafb65ac._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_cb7b44d6._.js +1 -1
- qalita/_frontend/.next/server/chunks/ssr/_d44c43ed._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/components_DashboardContent_tsx_c3635665._.js +1 -1
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_server_route-modules_app-page_vendored_a443a6bf._.js +3 -0
- qalita/_frontend/.next/server/middleware-build-manifest.js +5 -5
- qalita/_frontend/.next/server/pages/404.html +1 -1
- qalita/_frontend/.next/server/pages/500.html +2 -2
- qalita/_frontend/.next/server/server-reference-manifest.js +1 -1
- qalita/_frontend/.next/server/server-reference-manifest.json +1 -1
- qalita/_frontend/.next/static/chunks/02a64570f0a14789.js +1 -0
- qalita/_frontend/.next/static/chunks/{7340adf74ff47ec0.js → 0b082245f106d665.js} +1 -1
- qalita/_frontend/.next/static/chunks/27b3ba70c7ef50a8.js +1 -0
- qalita/_frontend/.next/static/chunks/517e9b74d1a3c0ce.js +1 -0
- qalita/_frontend/.next/static/chunks/58689c96b0676c41.js +1 -0
- qalita/_frontend/.next/static/chunks/{236f7e5abd6f09ff.js → 89ba62a8ba9b79ce.js} +2 -2
- qalita/_frontend/.next/static/chunks/acc5da18ff20daa1.js +3 -0
- qalita/_frontend/.next/static/chunks/bdc8a8e7721f5675.js +2 -0
- qalita/_frontend/.next/static/chunks/e0df86cbf44bbf9f.js +1 -0
- qalita/_frontend/.next/static/chunks/e4c3a252774ab7fd.css +1 -0
- qalita/_frontend/.next/static/chunks/e6ce59ba40b863f2.js +1 -0
- qalita/_frontend/.next/static/chunks/{30ea11065999f7ac.js → ec4b1f1e3cd3ae43.js} +1 -1
- qalita/_frontend/.next/static/chunks/{turbopack-25186fc8e1264445.js → turbopack-d21156d03715fafa.js} +1 -1
- qalita/_frontend/node_modules/@swc/helpers/package.json +225 -2
- qalita/_frontend/node_modules/next/node_modules/@swc/helpers/package.json +471 -0
- qalita/_frontend/package.json +12 -1
- qalita/commands/pack.py +61 -8
- qalita/commands/worker.py +46 -20
- qalita/commands/worker_grpc.py +941 -0
- qalita/grpc/__init__.py +8 -0
- qalita/grpc/client.py +693 -0
- qalita/grpc/protos/__init__.py +4 -0
- qalita/grpc/protos/qalita.proto +391 -0
- qalita/grpc/protos/qalita_pb2.py +112 -0
- qalita/grpc/protos/qalita_pb2_grpc.py +588 -0
- qalita/internal/data_preview.py +565 -0
- qalita/internal/request.py +2 -1
- qalita/internal/utils.py +1 -1
- qalita/web/app.py +6 -2
- qalita/web/blueprints/dashboard.py +12 -44
- qalita/web/blueprints/helpers.py +119 -46
- qalita/web/blueprints/sources.py +5 -99
- qalita/web/blueprints/workers.py +6 -6
- {qalita-2.6.3.dist-info → qalita-2.8.0.dist-info}/METADATA +7 -1
- {qalita-2.6.3.dist-info → qalita-2.8.0.dist-info}/RECORD +124 -111
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__345b6cae._.js +0 -3
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__7213ba1d._.js +0 -3
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__9130e1f5._.js +0 -3
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__e2a7729d._.js +0 -3
- qalita/_frontend/.next/server/chunks/ssr/app_layout_tsx_271801d7._.js +0 -3
- qalita/_frontend/.next/static/chunks/0f84739db4a8acc7.js +0 -1
- qalita/_frontend/.next/static/chunks/1107bdca1eff6d34.css +0 -1
- qalita/_frontend/.next/static/chunks/4b0c5de8d4cc313f.js +0 -1
- qalita/_frontend/.next/static/chunks/4dd28bc3f722184a.js +0 -2
- qalita/_frontend/.next/static/chunks/711d597b816a80c1.js +0 -1
- qalita/_frontend/.next/static/chunks/bb29c2be4df20a40.js +0 -1
- qalita/_frontend/.next/static/chunks/ecf559101be0ae12.js +0 -3
- /qalita/_frontend/.next/static/{N9MqNrf23ZZkbbSW2aXkt → oDJBrlQBPl3vggds1RNfL}/_buildManifest.js +0 -0
- /qalita/_frontend/.next/static/{N9MqNrf23ZZkbbSW2aXkt → oDJBrlQBPl3vggds1RNfL}/_clientMiddlewareManifest.json +0 -0
- /qalita/_frontend/.next/static/{N9MqNrf23ZZkbbSW2aXkt → oDJBrlQBPl3vggds1RNfL}/_ssgManifest.js +0 -0
- /qalita/_frontend/node_modules/{@swc → next/node_modules/@swc}/helpers/cjs/_interop_require_default.cjs +0 -0
- /qalita/_frontend/node_modules/{@swc → next/node_modules/@swc}/helpers/cjs/_interop_require_wildcard.cjs +0 -0
- {qalita-2.6.3.dist-info → qalita-2.8.0.dist-info}/WHEEL +0 -0
- {qalita-2.6.3.dist-info → qalita-2.8.0.dist-info}/entry_points.txt +0 -0
- {qalita-2.6.3.dist-info → qalita-2.8.0.dist-info}/licenses/LICENSE +0 -0
qalita/grpc/__init__.py
ADDED
qalita/grpc/client.py
ADDED
|
@@ -0,0 +1,693 @@
|
|
|
1
|
+
"""
|
|
2
|
+
# QALITA (c) COPYRIGHT 2025 - ALL RIGHTS RESERVED -
|
|
3
|
+
gRPC client for Worker-Backend communication with automatic reconnection
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import asyncio
|
|
7
|
+
from typing import Optional, AsyncIterator, Callable, Any
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
from urllib.parse import urlparse
|
|
10
|
+
|
|
11
|
+
import grpc
|
|
12
|
+
import grpc.aio
|
|
13
|
+
from google.protobuf.timestamp_pb2 import Timestamp
|
|
14
|
+
from google.protobuf import empty_pb2
|
|
15
|
+
|
|
16
|
+
from qalita.internal.utils import logger
|
|
17
|
+
from qalita.grpc.protos import qalita_pb2, qalita_pb2_grpc
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class GrpcClient:
|
|
21
|
+
"""
|
|
22
|
+
gRPC client for communicating with the QALITA backend.
|
|
23
|
+
|
|
24
|
+
Features:
|
|
25
|
+
- Automatic reconnection with exponential backoff
|
|
26
|
+
- Keep-alive management
|
|
27
|
+
- Bidirectional streaming support
|
|
28
|
+
- Thread-safe connection state
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
    def __init__(
        self,
        url: str,
        token: str,
        worker_id: Optional[int] = None,
        max_reconnect_attempts: int = 10,
        initial_reconnect_delay: float = 1.0,
        max_reconnect_delay: float = 60.0,
    ):
        """
        Initialize the gRPC client.

        Args:
            url: Backend URL (e.g., "http://localhost:3080" or "grpc://localhost:50051")
            token: Authentication token
            worker_id: Optional worker ID for keep-alive
            max_reconnect_attempts: Maximum reconnection attempts (0 = unlimited)
            initial_reconnect_delay: Initial delay between reconnection attempts
            max_reconnect_delay: Maximum delay between reconnection attempts
        """
        self._url = url
        self._token = token
        self._worker_id = worker_id
        self._max_reconnect_attempts = max_reconnect_attempts
        self._initial_reconnect_delay = initial_reconnect_delay
        self._max_reconnect_delay = max_reconnect_delay

        # Parse URL to get gRPC endpoint (a "host:port" target string)
        self._grpc_target = self._parse_grpc_target(url)

        # Connection state (channel/stub are created lazily by connect())
        self._channel: Optional[grpc.aio.Channel] = None
        self._stub: Optional[qalita_pb2_grpc.WorkerServiceStub] = None
        self._connected = False
        self._reconnect_attempts = 0

        # Stream state; the queue is re-created inside start_stream() so it is
        # bound to the running event loop, not the loop active at construction.
        self._stream_call = None
        self._outgoing_queue: asyncio.Queue = asyncio.Queue()
        self._stream_active = False

        # Callbacks, wired up through the on_*() setter methods
        self._on_job_received: Optional[Callable] = None
        self._on_routine_received: Optional[Callable] = None
        self._on_data_preview_request: Optional[Callable] = None
        self._on_add_source_request: Optional[Callable] = None
        self._on_disconnect: Optional[Callable] = None
|
|
78
|
+
|
|
79
|
+
def _parse_grpc_target(self, url: str) -> str:
|
|
80
|
+
"""
|
|
81
|
+
Parse the backend URL to get the gRPC target.
|
|
82
|
+
|
|
83
|
+
If the URL is HTTP, convert to gRPC port (default 50051).
|
|
84
|
+
If the URL is already gRPC, use as-is.
|
|
85
|
+
"""
|
|
86
|
+
parsed = urlparse(url)
|
|
87
|
+
|
|
88
|
+
if parsed.scheme in ('grpc', 'grpcs'):
|
|
89
|
+
# Already a gRPC URL
|
|
90
|
+
return f"{parsed.hostname}:{parsed.port or 50051}"
|
|
91
|
+
|
|
92
|
+
# HTTP URL - convert to gRPC
|
|
93
|
+
# Default: use same host but port 50051
|
|
94
|
+
host = parsed.hostname or 'localhost'
|
|
95
|
+
grpc_port = 50051 # Default gRPC port
|
|
96
|
+
|
|
97
|
+
return f"{host}:{grpc_port}"
|
|
98
|
+
|
|
99
|
+
@property
|
|
100
|
+
def metadata(self) -> list[tuple[str, str]]:
|
|
101
|
+
"""Get gRPC call metadata with authentication."""
|
|
102
|
+
return [('authorization', f'Bearer {self._token}')]
|
|
103
|
+
|
|
104
|
+
    async def connect(self) -> bool:
        """
        Establish connection to the gRPC server.

        NOTE(review): grpc.aio channels connect lazily, so True here means the
        channel and stub objects were created successfully, not that the
        server was actually reached — confirm callers treat it that way.

        Returns:
            True if connection successful, False otherwise
        """
        try:
            # Create channel with options for long-running streams
            self._channel = grpc.aio.insecure_channel(
                self._grpc_target,
                options=[
                    # HTTP/2 keepalive pings every 30s, 10s ack timeout,
                    # allowed even when no RPC is in flight.
                    ('grpc.keepalive_time_ms', 30000),
                    ('grpc.keepalive_timeout_ms', 10000),
                    ('grpc.keepalive_permit_without_calls', True),
                    ('grpc.http2.min_recv_ping_interval_without_data_ms', 10000),
                    ('grpc.http2.max_pings_without_data', 0),
                    # Allow large payloads (50 MiB) in both directions.
                    ('grpc.max_receive_message_length', 50 * 1024 * 1024),
                    ('grpc.max_send_message_length', 50 * 1024 * 1024),
                ]
            )

            self._stub = qalita_pb2_grpc.WorkerServiceStub(self._channel)
            self._connected = True
            # Fresh channel: reset the backoff counter used by _reconnect().
            self._reconnect_attempts = 0

            logger.info(f"Connected to gRPC server at {self._grpc_target}")
            return True

        except Exception as e:
            logger.error(f"Failed to connect to gRPC server: {e}")
            self._connected = False
            return False
|
|
137
|
+
|
|
138
|
+
async def disconnect(self) -> None:
|
|
139
|
+
"""Close the gRPC connection gracefully."""
|
|
140
|
+
self._stream_active = False
|
|
141
|
+
|
|
142
|
+
if self._stream_call:
|
|
143
|
+
self._stream_call.cancel()
|
|
144
|
+
self._stream_call = None
|
|
145
|
+
|
|
146
|
+
if self._channel:
|
|
147
|
+
await self._channel.close()
|
|
148
|
+
self._channel = None
|
|
149
|
+
|
|
150
|
+
self._stub = None
|
|
151
|
+
self._connected = False
|
|
152
|
+
|
|
153
|
+
if self._on_disconnect:
|
|
154
|
+
await self._on_disconnect()
|
|
155
|
+
|
|
156
|
+
logger.info("Disconnected from gRPC server")
|
|
157
|
+
|
|
158
|
+
async def _reconnect(self) -> bool:
|
|
159
|
+
"""
|
|
160
|
+
Attempt to reconnect with exponential backoff.
|
|
161
|
+
|
|
162
|
+
Returns:
|
|
163
|
+
True if reconnection successful, False if max attempts exceeded
|
|
164
|
+
"""
|
|
165
|
+
delay = self._initial_reconnect_delay
|
|
166
|
+
|
|
167
|
+
while (self._max_reconnect_attempts == 0 or
|
|
168
|
+
self._reconnect_attempts < self._max_reconnect_attempts):
|
|
169
|
+
|
|
170
|
+
self._reconnect_attempts += 1
|
|
171
|
+
logger.warning(
|
|
172
|
+
f"Reconnection attempt {self._reconnect_attempts}"
|
|
173
|
+
f"{f'/{self._max_reconnect_attempts}' if self._max_reconnect_attempts > 0 else ''}"
|
|
174
|
+
)
|
|
175
|
+
|
|
176
|
+
await asyncio.sleep(delay)
|
|
177
|
+
|
|
178
|
+
if await self.connect():
|
|
179
|
+
return True
|
|
180
|
+
|
|
181
|
+
# Exponential backoff
|
|
182
|
+
delay = min(delay * 2, self._max_reconnect_delay)
|
|
183
|
+
|
|
184
|
+
logger.error("Max reconnection attempts exceeded")
|
|
185
|
+
return False
|
|
186
|
+
|
|
187
|
+
# =========================================================================
|
|
188
|
+
# Unary RPCs
|
|
189
|
+
# =========================================================================
|
|
190
|
+
|
|
191
|
+
async def authenticate(self) -> Optional[qalita_pb2.AuthResponse]:
|
|
192
|
+
"""
|
|
193
|
+
Authenticate with the backend.
|
|
194
|
+
|
|
195
|
+
Returns:
|
|
196
|
+
AuthResponse if successful, None otherwise
|
|
197
|
+
"""
|
|
198
|
+
if not self._connected:
|
|
199
|
+
if not await self.connect():
|
|
200
|
+
return None
|
|
201
|
+
|
|
202
|
+
try:
|
|
203
|
+
request = qalita_pb2.AuthRequest(token=self._token)
|
|
204
|
+
response = await self._stub.Authenticate(request)
|
|
205
|
+
return response
|
|
206
|
+
except grpc.aio.AioRpcError as e:
|
|
207
|
+
logger.error(f"Authentication failed: {e.code()} - {e.details()}")
|
|
208
|
+
return None
|
|
209
|
+
|
|
210
|
+
async def register_worker(
|
|
211
|
+
self,
|
|
212
|
+
name: str,
|
|
213
|
+
mode: str,
|
|
214
|
+
status: str = "online",
|
|
215
|
+
is_active: bool = True,
|
|
216
|
+
) -> Optional[qalita_pb2.Worker]:
|
|
217
|
+
"""
|
|
218
|
+
Register or update a worker.
|
|
219
|
+
|
|
220
|
+
Returns:
|
|
221
|
+
Worker object if successful, None otherwise
|
|
222
|
+
"""
|
|
223
|
+
if not self._connected:
|
|
224
|
+
if not await self.connect():
|
|
225
|
+
return None
|
|
226
|
+
|
|
227
|
+
try:
|
|
228
|
+
request = qalita_pb2.RegisterWorkerRequest(
|
|
229
|
+
name=name,
|
|
230
|
+
mode=mode,
|
|
231
|
+
status=status,
|
|
232
|
+
is_active=is_active,
|
|
233
|
+
)
|
|
234
|
+
response = await self._stub.RegisterWorker(
|
|
235
|
+
request,
|
|
236
|
+
metadata=self.metadata,
|
|
237
|
+
)
|
|
238
|
+
self._worker_id = response.id
|
|
239
|
+
return response
|
|
240
|
+
except grpc.aio.AioRpcError as e:
|
|
241
|
+
logger.error(f"Worker registration failed: {e.code()} - {e.details()}")
|
|
242
|
+
return None
|
|
243
|
+
|
|
244
|
+
async def get_worker(self, worker_id: int) -> Optional[qalita_pb2.Worker]:
|
|
245
|
+
"""Get worker by ID."""
|
|
246
|
+
if not self._connected:
|
|
247
|
+
return None
|
|
248
|
+
|
|
249
|
+
try:
|
|
250
|
+
request = qalita_pb2.GetWorkerRequest(worker_id=worker_id)
|
|
251
|
+
return await self._stub.GetWorker(request, metadata=self.metadata)
|
|
252
|
+
except grpc.aio.AioRpcError as e:
|
|
253
|
+
logger.error(f"Get worker failed: {e.code()} - {e.details()}")
|
|
254
|
+
return None
|
|
255
|
+
|
|
256
|
+
async def get_pack(self, pack_id: int) -> Optional[qalita_pb2.Pack]:
|
|
257
|
+
"""Get pack by ID with versions."""
|
|
258
|
+
if not self._connected:
|
|
259
|
+
return None
|
|
260
|
+
|
|
261
|
+
try:
|
|
262
|
+
request = qalita_pb2.GetPackRequest(pack_id=pack_id)
|
|
263
|
+
return await self._stub.GetPack(request, metadata=self.metadata)
|
|
264
|
+
except grpc.aio.AioRpcError as e:
|
|
265
|
+
logger.error(f"Get pack failed: {e.code()} - {e.details()}")
|
|
266
|
+
return None
|
|
267
|
+
|
|
268
|
+
async def get_source(self, source_id: int) -> Optional[qalita_pb2.Source]:
|
|
269
|
+
"""Get source by ID with versions."""
|
|
270
|
+
if not self._connected:
|
|
271
|
+
return None
|
|
272
|
+
|
|
273
|
+
try:
|
|
274
|
+
request = qalita_pb2.GetSourceRequest(source_id=source_id)
|
|
275
|
+
return await self._stub.GetSource(request, metadata=self.metadata)
|
|
276
|
+
except grpc.aio.AioRpcError as e:
|
|
277
|
+
logger.error(f"Get source failed: {e.code()} - {e.details()}")
|
|
278
|
+
return None
|
|
279
|
+
|
|
280
|
+
async def get_asset_url(self, asset_id: int) -> Optional[qalita_pb2.AssetUrl]:
|
|
281
|
+
"""Get asset URL by ID."""
|
|
282
|
+
if not self._connected:
|
|
283
|
+
return None
|
|
284
|
+
|
|
285
|
+
try:
|
|
286
|
+
request = qalita_pb2.GetAssetUrlRequest(asset_id=asset_id)
|
|
287
|
+
return await self._stub.GetAssetUrl(request, metadata=self.metadata)
|
|
288
|
+
except grpc.aio.AioRpcError as e:
|
|
289
|
+
logger.error(f"Get asset URL failed: {e.code()} - {e.details()}")
|
|
290
|
+
return None
|
|
291
|
+
|
|
292
|
+
async def get_registries(self) -> list[qalita_pb2.Registry]:
|
|
293
|
+
"""Get all registries."""
|
|
294
|
+
if not self._connected:
|
|
295
|
+
return []
|
|
296
|
+
|
|
297
|
+
try:
|
|
298
|
+
response = await self._stub.GetRegistries(
|
|
299
|
+
empty_pb2.Empty(),
|
|
300
|
+
metadata=self.metadata,
|
|
301
|
+
)
|
|
302
|
+
return list(response.registries)
|
|
303
|
+
except grpc.aio.AioRpcError as e:
|
|
304
|
+
logger.error(f"Get registries failed: {e.code()} - {e.details()}")
|
|
305
|
+
return []
|
|
306
|
+
|
|
307
|
+
async def create_job(
|
|
308
|
+
self,
|
|
309
|
+
source_id: int,
|
|
310
|
+
pack_id: int,
|
|
311
|
+
source_version_id: Optional[int] = None,
|
|
312
|
+
target_id: Optional[int] = None,
|
|
313
|
+
target_version_id: Optional[int] = None,
|
|
314
|
+
pack_version_id: Optional[int] = None,
|
|
315
|
+
routine_id: Optional[int] = None,
|
|
316
|
+
pack_config_override: Optional[str] = None,
|
|
317
|
+
job_type: Optional[str] = None,
|
|
318
|
+
name: Optional[str] = None,
|
|
319
|
+
) -> Optional[qalita_pb2.Job]:
|
|
320
|
+
"""Create a new job."""
|
|
321
|
+
if not self._connected:
|
|
322
|
+
return None
|
|
323
|
+
|
|
324
|
+
try:
|
|
325
|
+
request = qalita_pb2.CreateJobRequest(
|
|
326
|
+
source_id=source_id,
|
|
327
|
+
pack_id=pack_id,
|
|
328
|
+
)
|
|
329
|
+
if source_version_id:
|
|
330
|
+
request.source_version_id = source_version_id
|
|
331
|
+
if target_id:
|
|
332
|
+
request.target_id = target_id
|
|
333
|
+
if target_version_id:
|
|
334
|
+
request.target_version_id = target_version_id
|
|
335
|
+
if pack_version_id:
|
|
336
|
+
request.pack_version_id = pack_version_id
|
|
337
|
+
if routine_id:
|
|
338
|
+
request.routine_id = routine_id
|
|
339
|
+
if pack_config_override:
|
|
340
|
+
request.pack_config_override = pack_config_override
|
|
341
|
+
if job_type:
|
|
342
|
+
request.type = job_type
|
|
343
|
+
if name:
|
|
344
|
+
request.name = name
|
|
345
|
+
|
|
346
|
+
return await self._stub.CreateJob(request, metadata=self.metadata)
|
|
347
|
+
except grpc.aio.AioRpcError as e:
|
|
348
|
+
logger.error(f"Create job failed: {e.code()} - {e.details()}")
|
|
349
|
+
return None
|
|
350
|
+
|
|
351
|
+
async def update_job(
|
|
352
|
+
self,
|
|
353
|
+
job_id: int,
|
|
354
|
+
agent_id: Optional[int] = None,
|
|
355
|
+
status: Optional[str] = None,
|
|
356
|
+
name: Optional[str] = None,
|
|
357
|
+
start_date: Optional[datetime] = None,
|
|
358
|
+
end_date: Optional[datetime] = None,
|
|
359
|
+
logs_id: Optional[int] = None,
|
|
360
|
+
) -> Optional[qalita_pb2.Job]:
|
|
361
|
+
"""Update an existing job."""
|
|
362
|
+
if not self._connected:
|
|
363
|
+
return None
|
|
364
|
+
|
|
365
|
+
try:
|
|
366
|
+
request = qalita_pb2.UpdateJobRequest(job_id=job_id)
|
|
367
|
+
if agent_id is not None:
|
|
368
|
+
request.agent_id = agent_id
|
|
369
|
+
if status:
|
|
370
|
+
request.status = status
|
|
371
|
+
if name:
|
|
372
|
+
request.name = name
|
|
373
|
+
if start_date:
|
|
374
|
+
ts = Timestamp()
|
|
375
|
+
ts.FromDatetime(start_date if start_date.tzinfo else start_date.replace(tzinfo=timezone.utc))
|
|
376
|
+
request.start_date.CopyFrom(ts)
|
|
377
|
+
if end_date:
|
|
378
|
+
ts = Timestamp()
|
|
379
|
+
ts.FromDatetime(end_date if end_date.tzinfo else end_date.replace(tzinfo=timezone.utc))
|
|
380
|
+
request.end_date.CopyFrom(ts)
|
|
381
|
+
if logs_id is not None:
|
|
382
|
+
request.logs_id = logs_id
|
|
383
|
+
|
|
384
|
+
return await self._stub.UpdateJob(request, metadata=self.metadata)
|
|
385
|
+
except grpc.aio.AioRpcError as e:
|
|
386
|
+
logger.error(f"Update job failed: {e.code()} - {e.details()}")
|
|
387
|
+
return None
|
|
388
|
+
|
|
389
|
+
async def claim_job(self, job_id: int, worker_id: int) -> Optional[qalita_pb2.Job]:
|
|
390
|
+
"""Claim a job for a worker."""
|
|
391
|
+
if not self._connected:
|
|
392
|
+
return None
|
|
393
|
+
|
|
394
|
+
try:
|
|
395
|
+
request = qalita_pb2.ClaimJobRequest(job_id=job_id, worker_id=worker_id)
|
|
396
|
+
return await self._stub.ClaimJob(request, metadata=self.metadata)
|
|
397
|
+
except grpc.aio.AioRpcError as e:
|
|
398
|
+
logger.error(f"Claim job failed: {e.code()} - {e.details()}")
|
|
399
|
+
return None
|
|
400
|
+
|
|
401
|
+
async def get_routines(self) -> list[qalita_pb2.Routine]:
|
|
402
|
+
"""Get all routines."""
|
|
403
|
+
if not self._connected:
|
|
404
|
+
return []
|
|
405
|
+
|
|
406
|
+
try:
|
|
407
|
+
response = await self._stub.GetRoutines(
|
|
408
|
+
empty_pb2.Empty(),
|
|
409
|
+
metadata=self.metadata,
|
|
410
|
+
)
|
|
411
|
+
return list(response.routines)
|
|
412
|
+
except grpc.aio.AioRpcError as e:
|
|
413
|
+
logger.error(f"Get routines failed: {e.code()} - {e.details()}")
|
|
414
|
+
return []
|
|
415
|
+
|
|
416
|
+
# =========================================================================
|
|
417
|
+
# Bidirectional Streaming
|
|
418
|
+
# =========================================================================
|
|
419
|
+
|
|
420
|
+
    def on_job_received(self, callback: Callable[[qalita_pb2.Job], Any]) -> None:
        """Set callback for when a job is received via stream.

        The callback receives the Job protobuf message.
        """
        self._on_job_received = callback
|
|
423
|
+
|
|
424
|
+
    def on_routine_received(self, callback: Callable[[qalita_pb2.Routine], Any]) -> None:
        """Set callback for when a routine is triggered via stream.

        The callback receives the Routine protobuf message.
        """
        self._on_routine_received = callback
|
|
427
|
+
|
|
428
|
+
    def on_data_preview_request(self, callback: Callable[[qalita_pb2.DataPreviewRequest], Any]) -> None:
        """Set callback for when a data preview request is received via stream.

        The callback receives the DataPreviewRequest protobuf message.
        """
        self._on_data_preview_request = callback
|
|
431
|
+
|
|
432
|
+
    def on_add_source_request(self, callback: Callable[[qalita_pb2.AddSourceRequest], Any]) -> None:
        """Set callback for when an add source request is received via stream.

        The callback receives the AddSourceRequest protobuf message.
        """
        self._on_add_source_request = callback
|
|
435
|
+
|
|
436
|
+
    def on_disconnect(self, callback: Callable[[], Any]) -> None:
        """Set callback for when connection is lost.

        The callback takes no arguments; disconnect() awaits it, so it is
        expected to be awaitable.
        """
        self._on_disconnect = callback
|
|
439
|
+
|
|
440
|
+
async def send_keep_alive(self) -> None:
|
|
441
|
+
"""Send a keep-alive message through the stream."""
|
|
442
|
+
if not self._worker_id:
|
|
443
|
+
logger.warning("Cannot send keep-alive: worker_id not set")
|
|
444
|
+
return
|
|
445
|
+
|
|
446
|
+
ts = Timestamp()
|
|
447
|
+
ts.FromDatetime(datetime.now(timezone.utc))
|
|
448
|
+
|
|
449
|
+
msg = qalita_pb2.WorkerMessage(
|
|
450
|
+
keep_alive=qalita_pb2.KeepAlive(
|
|
451
|
+
worker_id=self._worker_id,
|
|
452
|
+
timestamp=ts,
|
|
453
|
+
)
|
|
454
|
+
)
|
|
455
|
+
await self._outgoing_queue.put(msg)
|
|
456
|
+
|
|
457
|
+
async def send_job_status(
|
|
458
|
+
self,
|
|
459
|
+
job_id: int,
|
|
460
|
+
status: str,
|
|
461
|
+
error_message: Optional[str] = None,
|
|
462
|
+
start_date: Optional[datetime] = None,
|
|
463
|
+
end_date: Optional[datetime] = None,
|
|
464
|
+
logs_id: Optional[int] = None,
|
|
465
|
+
) -> None:
|
|
466
|
+
"""Send a job status update through the stream."""
|
|
467
|
+
job_status = qalita_pb2.JobStatusUpdate(
|
|
468
|
+
job_id=job_id,
|
|
469
|
+
status=status,
|
|
470
|
+
)
|
|
471
|
+
if error_message:
|
|
472
|
+
job_status.error_message = error_message
|
|
473
|
+
if start_date:
|
|
474
|
+
ts = Timestamp()
|
|
475
|
+
ts.FromDatetime(start_date if start_date.tzinfo else start_date.replace(tzinfo=timezone.utc))
|
|
476
|
+
job_status.start_date.CopyFrom(ts)
|
|
477
|
+
if end_date:
|
|
478
|
+
ts = Timestamp()
|
|
479
|
+
ts.FromDatetime(end_date if end_date.tzinfo else end_date.replace(tzinfo=timezone.utc))
|
|
480
|
+
job_status.end_date.CopyFrom(ts)
|
|
481
|
+
if logs_id is not None:
|
|
482
|
+
job_status.logs_id = logs_id
|
|
483
|
+
|
|
484
|
+
msg = qalita_pb2.WorkerMessage(job_status=job_status)
|
|
485
|
+
await self._outgoing_queue.put(msg)
|
|
486
|
+
|
|
487
|
+
async def send_worker_status(self, worker_id: int, status: str) -> None:
|
|
488
|
+
"""Send a worker status update through the stream."""
|
|
489
|
+
msg = qalita_pb2.WorkerMessage(
|
|
490
|
+
worker_status=qalita_pb2.WorkerStatusUpdate(
|
|
491
|
+
worker_id=worker_id,
|
|
492
|
+
status=status,
|
|
493
|
+
)
|
|
494
|
+
)
|
|
495
|
+
await self._outgoing_queue.put(msg)
|
|
496
|
+
|
|
497
|
+
async def send_log_line(self, job_id: int, line: str, level: str = "INFO") -> None:
|
|
498
|
+
"""Send a log line through the stream for live log streaming."""
|
|
499
|
+
ts = Timestamp()
|
|
500
|
+
ts.FromDatetime(datetime.now(timezone.utc))
|
|
501
|
+
|
|
502
|
+
msg = qalita_pb2.WorkerMessage(
|
|
503
|
+
log_line=qalita_pb2.JobLogLine(
|
|
504
|
+
job_id=job_id,
|
|
505
|
+
line=line,
|
|
506
|
+
level=level,
|
|
507
|
+
timestamp=ts,
|
|
508
|
+
)
|
|
509
|
+
)
|
|
510
|
+
await self._outgoing_queue.put(msg)
|
|
511
|
+
|
|
512
|
+
async def send_data_preview_response(
|
|
513
|
+
self,
|
|
514
|
+
request_id: str,
|
|
515
|
+
ok: bool,
|
|
516
|
+
data_type: str,
|
|
517
|
+
error: Optional[str] = None,
|
|
518
|
+
headers: Optional[list[str]] = None,
|
|
519
|
+
rows: Optional[list[list[str]]] = None,
|
|
520
|
+
total_rows: Optional[int] = None,
|
|
521
|
+
content: Optional[str] = None,
|
|
522
|
+
binary_base64: Optional[str] = None,
|
|
523
|
+
mime_type: Optional[str] = None,
|
|
524
|
+
) -> None:
|
|
525
|
+
"""Send a data preview response through the stream."""
|
|
526
|
+
response = qalita_pb2.DataPreviewResponse(
|
|
527
|
+
request_id=request_id,
|
|
528
|
+
ok=ok,
|
|
529
|
+
data_type=data_type,
|
|
530
|
+
)
|
|
531
|
+
|
|
532
|
+
if error:
|
|
533
|
+
response.error = error
|
|
534
|
+
if headers:
|
|
535
|
+
response.headers.extend(headers)
|
|
536
|
+
if rows:
|
|
537
|
+
for row in rows:
|
|
538
|
+
data_row = qalita_pb2.DataRow(values=row)
|
|
539
|
+
response.rows.append(data_row)
|
|
540
|
+
if total_rows is not None:
|
|
541
|
+
response.total_rows = total_rows
|
|
542
|
+
if content:
|
|
543
|
+
response.content = content
|
|
544
|
+
if binary_base64:
|
|
545
|
+
response.binary_base64 = binary_base64
|
|
546
|
+
if mime_type:
|
|
547
|
+
response.mime_type = mime_type
|
|
548
|
+
|
|
549
|
+
msg = qalita_pb2.WorkerMessage(data_preview_response=response)
|
|
550
|
+
await self._outgoing_queue.put(msg)
|
|
551
|
+
|
|
552
|
+
async def send_add_source_response(
|
|
553
|
+
self,
|
|
554
|
+
request_id: str,
|
|
555
|
+
ok: bool,
|
|
556
|
+
source_id: Optional[int] = None,
|
|
557
|
+
connectivity_verified: bool = False,
|
|
558
|
+
error: Optional[str] = None,
|
|
559
|
+
) -> None:
|
|
560
|
+
"""Send an add source response through the stream."""
|
|
561
|
+
response = qalita_pb2.AddSourceResponse(
|
|
562
|
+
request_id=request_id,
|
|
563
|
+
ok=ok,
|
|
564
|
+
connectivity_verified=connectivity_verified,
|
|
565
|
+
)
|
|
566
|
+
|
|
567
|
+
if error:
|
|
568
|
+
response.error = error
|
|
569
|
+
if source_id is not None:
|
|
570
|
+
response.source_id = source_id
|
|
571
|
+
|
|
572
|
+
msg = qalita_pb2.WorkerMessage(add_source_response=response)
|
|
573
|
+
await self._outgoing_queue.put(msg)
|
|
574
|
+
|
|
575
|
+
async def _outgoing_messages(self) -> AsyncIterator[qalita_pb2.WorkerMessage]:
    """Generator for outgoing stream messages.

    Polls the outgoing queue without blocking (deliberate workaround so the
    gRPC machinery is never starved by a long-pending ``get()``), sleeping
    briefly between empty polls, until the stream is deactivated.
    """
    logger.info("Outgoing messages generator started")
    while self._stream_active:
        try:
            # Use get_nowait in a loop with sleep to avoid blocking gRPC
            try:
                msg = self._outgoing_queue.get_nowait()
            except asyncio.QueueEmpty:
                # No message available, yield control briefly
                await asyncio.sleep(0.05)
                continue
            logger.debug(f"Yielding message type: {msg.WhichOneof('payload')}")
            yield msg
        except asyncio.CancelledError:
            logger.info("Outgoing messages generator cancelled")
            break
        except Exception as e:
            logger.error(f"Error in outgoing generator: {e}")
            await asyncio.sleep(0.1)
    logger.info("Outgoing messages generator stopped")
|
|
595
|
+
|
|
596
|
+
async def start_stream(self) -> None:
    """
    Start the bidirectional stream for real-time communication.

    This method runs indefinitely, handling:
    - Keep-alive signals (sent every 10 seconds)
    - Incoming job assignments
    - Incoming routine triggers
    - Automatic reconnection on failure

    Raises:
        ConnectionError: If the initial connection cannot be established.
    """
    if not self._connected:
        if not await self.connect():
            raise ConnectionError("Failed to connect to gRPC server")

    # Recreate queue in async context to ensure proper event loop binding
    self._outgoing_queue = asyncio.Queue()
    self._stream_active = True

    async def keep_alive_loop():
        """Send keep-alive every 10 seconds."""
        logger.info(f"Keep-alive loop started, worker_id={self._worker_id}")
        while self._stream_active:
            try:
                logger.debug(f"Sending keep-alive for worker {self._worker_id}")
                await self.send_keep_alive()
                await asyncio.sleep(10)
            except asyncio.CancelledError:
                logger.info("Keep-alive loop cancelled")
                break
            except Exception as e:
                logger.error(f"Keep-alive error: {e}")
                # Back off before retrying so a persistent send failure
                # does not turn this loop into a busy spin.
                await asyncio.sleep(10)

    async def dispatch(msg) -> None:
        """Route one incoming server message to its registered handler."""
        if msg.HasField('job_assignment'):
            job = msg.job_assignment.job
            logger.info(f"Received job assignment: {job.id}")
            if self._on_job_received:
                await self._on_job_received(job)

        elif msg.HasField('routine_triggered'):
            routine = msg.routine_triggered.routine
            logger.info(f"Received routine trigger: {routine.id}")
            if self._on_routine_received:
                await self._on_routine_received(routine)

        elif msg.HasField('data_preview_request'):
            request = msg.data_preview_request
            logger.info(f"Received data preview request: {request.request_id} for source {request.source_id}")
            if self._on_data_preview_request:
                await self._on_data_preview_request(request)

        elif msg.HasField('add_source_request'):
            request = msg.add_source_request
            logger.info(f"Received add source request: {request.request_id} for '{request.name}'")
            if self._on_add_source_request:
                await self._on_add_source_request(request)

        elif msg.HasField('ack'):
            logger.debug(f"Received ack: {msg.ack.message_type}")

        elif msg.HasField('error'):
            logger.error(f"Server error: {msg.error.code} - {msg.error.message}")

    async def process_stream():
        """Consume the server stream, reconnecting iteratively on failure.

        A loop is used instead of recursing into process_stream() so that
        repeated reconnections cannot grow the call stack without bound.
        """
        while True:
            try:
                self._stream_call = self._stub.Connect(
                    self._outgoing_messages(),
                    metadata=self.metadata,
                )

                async for msg in self._stream_call:
                    await dispatch(msg)
                # Server closed the stream cleanly; stop processing.
                return
            except grpc.aio.AioRpcError as e:
                if e.code() == grpc.StatusCode.CANCELLED:
                    logger.info("Stream cancelled")
                    return
                logger.error(f"Stream error: {e.code()} - {e.details()}")
                # Attempt reconnection; on success, loop and re-open the
                # stream. Otherwise give up and let the caller clean up.
                if self._stream_active and await self._reconnect():
                    continue
                return

    # Run keep-alive and stream processing concurrently
    keep_alive_task = asyncio.create_task(keep_alive_loop())

    try:
        await process_stream()
    finally:
        self._stream_active = False
        keep_alive_task.cancel()
        try:
            await keep_alive_task
        except asyncio.CancelledError:
            pass
|
|
688
|
+
|
|
689
|
+
async def stop_stream(self) -> None:
    """Stop the bidirectional stream.

    Flags the stream inactive (winding down the keep-alive and outgoing
    generators) and cancels the underlying gRPC call, if one is in flight.
    """
    self._stream_active = False
    call = self._stream_call
    if call:
        call.cancel()