runqy-python 0.2.1__tar.gz → 0.2.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {runqy_python-0.2.1/src/runqy_python.egg-info → runqy_python-0.2.2}/PKG-INFO +1 -1
- {runqy_python-0.2.1 → runqy_python-0.2.2}/pyproject.toml +1 -1
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python/__init__.py +2 -1
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python/decorator.py +34 -0
- runqy_python-0.2.2/src/runqy_python/runner.py +235 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2/src/runqy_python.egg-info}/PKG-INFO +1 -1
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python.egg-info/SOURCES.txt +3 -1
- runqy_python-0.2.2/tests/test_client.py +220 -0
- runqy_python-0.2.2/tests/test_runner_hardening.py +541 -0
- runqy_python-0.2.1/src/runqy_python/runner.py +0 -127
- {runqy_python-0.2.1 → runqy_python-0.2.2}/LICENSE +0 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2}/README.md +0 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2}/setup.cfg +0 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python/client.py +0 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python.egg-info/dependency_links.txt +0 -0
- {runqy_python-0.2.1 → runqy_python-0.2.2}/src/runqy_python.egg-info/top_level.txt +0 -0
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"""runqy-python: Python SDK for runqy - write distributed task handlers with simple decorators."""
|
|
2
2
|
|
|
3
3
|
# Task execution (for workers)
|
|
4
|
-
from .decorator import task, load
|
|
4
|
+
from .decorator import task, load, RetryableError
|
|
5
5
|
from .runner import run, run_once
|
|
6
6
|
|
|
7
7
|
# Client (for enqueuing tasks)
|
|
@@ -20,6 +20,7 @@ __all__ = [
|
|
|
20
20
|
# Task execution
|
|
21
21
|
"task",
|
|
22
22
|
"load",
|
|
23
|
+
"RetryableError",
|
|
23
24
|
"run",
|
|
24
25
|
"run_once",
|
|
25
26
|
# Client
|
|
@@ -4,6 +4,23 @@ _registered_handler = None
|
|
|
4
4
|
_registered_loader = None
|
|
5
5
|
|
|
6
6
|
|
|
7
|
+
class RetryableError(Exception):
    """Raise this from a @task handler to signal that the task should be retried.

    The runner turns this exception into a response with ``"retry": True`` and
    ``str(exc)`` as the error message, rather than a permanent failure.

    Usage:
        from runqy_python import task, RetryableError

        @task
        def process(payload):
            try:
                result = call_external_api(payload)
            except TimeoutError:
                raise RetryableError("API timed out, please retry")
            return result
    """
    # NOTE: no ``pass`` needed — the docstring alone is a valid class body.
|
|
22
|
+
|
|
23
|
+
|
|
7
24
|
def task(func):
|
|
8
25
|
"""Decorator to register a function as the task handler.
|
|
9
26
|
|
|
@@ -18,6 +35,11 @@ def task(func):
|
|
|
18
35
|
return ctx["model"].predict(payload)
|
|
19
36
|
"""
|
|
20
37
|
global _registered_handler
|
|
38
|
+
if _registered_handler is not None:
|
|
39
|
+
raise RuntimeError(
|
|
40
|
+
f"@task handler already registered ({_registered_handler.__name__}). "
|
|
41
|
+
"Only one @task handler is allowed per process."
|
|
42
|
+
)
|
|
21
43
|
_registered_handler = func
|
|
22
44
|
return func
|
|
23
45
|
|
|
@@ -39,6 +61,11 @@ def load(func):
|
|
|
39
61
|
return ctx["model"].predict(payload)
|
|
40
62
|
"""
|
|
41
63
|
global _registered_loader
|
|
64
|
+
if _registered_loader is not None:
|
|
65
|
+
raise RuntimeError(
|
|
66
|
+
f"@load handler already registered ({_registered_loader.__name__}). "
|
|
67
|
+
"Only one @load handler is allowed per process."
|
|
68
|
+
)
|
|
42
69
|
_registered_loader = func
|
|
43
70
|
return func
|
|
44
71
|
|
|
@@ -51,3 +78,10 @@ def get_handler():
|
|
|
51
78
|
def get_loader():
    """Return the function registered via @load, or None when none was set."""
    return _registered_loader
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _reset():
|
|
84
|
+
"""Reset registered handler and loader. For testing only."""
|
|
85
|
+
global _registered_handler, _registered_loader
|
|
86
|
+
_registered_handler = None
|
|
87
|
+
_registered_loader = None
|
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
"""Runner loop for processing tasks from runqy-worker."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import json
|
|
6
|
+
import signal
|
|
7
|
+
import traceback
|
|
8
|
+
from .decorator import get_handler, get_loader, RetryableError
|
|
9
|
+
|
|
10
|
+
# Flag for graceful shutdown
|
|
11
|
+
_shutdown_requested = False
|
|
12
|
+
|
|
13
|
+
# Private file object for protocol communication (set by _protect_stdout)
|
|
14
|
+
_protocol_stdout = None
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _shutdown_handler(signum, frame):
    """Handle SIGTERM/SIGINT for graceful shutdown.

    The first signal only sets a flag, letting the in-flight task finish
    before the main loop exits. A repeated signal means the process may be
    stuck, so it exits immediately.
    """
    global _shutdown_requested
    if not _shutdown_requested:
        # First signal: request a graceful stop.
        _shutdown_requested = True
        return
    # Second signal — force exit.
    sys.exit(1)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _protect_stdout():
    """Reserve the real stdout for the JSON protocol; send print() to stderr.

    A duplicate of the current stdout file descriptor is wrapped in a fresh
    file object and kept in _protocol_stdout so _safe_write still reaches the
    worker after sys.stdout is reassigned below.
    """
    global _protocol_stdout
    duplicated_fd = os.dup(sys.stdout.fileno())
    _protocol_stdout = os.fdopen(duplicated_fd, "w")
    # User print() calls now land in the log stream, not the protocol pipe.
    sys.stdout = sys.stderr
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _safe_write(data):
    """Serialize *data* to JSON and write it as one line on the protocol channel.

    Hardened against two failure modes:
      * non-JSON-serializable payloads — a well-formed fallback error response
        is emitted instead of raising, so the worker always gets a valid line;
      * BrokenPipeError — the worker closed the pipe; exit cleanly instead of
        crashing with an unhandled exception.
    """
    out = _protocol_stdout if _protocol_stdout is not None else sys.stdout

    try:
        text = json.dumps(data)
    except (TypeError, ValueError) as e:
        # Result not JSON-serializable — send error response instead.
        task_id = data.get("task_id", "unknown") if isinstance(data, dict) else "unknown"
        if not isinstance(task_id, str):
            # Bug fix: the task_id itself may be the unserializable value, in
            # which case dumping the fallback would raise the same TypeError.
            # Coerce to str so the fallback dump below cannot fail.
            task_id = str(task_id)
        fallback = {
            "task_id": task_id,
            "result": None,
            "error": f"Result not JSON-serializable: {e}",
            "retry": False,
        }
        text = json.dumps(fallback)

    try:
        out.write(text + "\n")
        out.flush()
    except BrokenPipeError:
        # Pipe closed by worker — exit cleanly
        sys.exit(1)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def run():
    """Main loop: load, ready signal, read tasks, call handler, write responses.

    Steps:
      1. Protect stdout and install SIGTERM/SIGINT handlers.
      2. Invoke the @load function if one is registered (model loading, etc.).
      3. Emit {"status": "ready"} so runqy-worker starts sending tasks.
      4. Read newline-delimited JSON task requests from stdin and dispatch
         each to the registered @task handler (with the @load context if any).
      5. Write one JSON response line per task to stdout.
    """
    # Keep user print() output away from the protocol channel.
    _protect_stdout()

    # Graceful shutdown on the usual termination signals.
    for signum in (signal.SIGTERM, signal.SIGINT):
        signal.signal(signum, _shutdown_handler)

    handler = get_handler()
    if handler is None:
        raise RuntimeError("No task handler registered. Use @task decorator.")

    # The loader runs before the ready signal so the worker never sends work
    # to a process whose setup failed.
    ctx = None
    loader = get_loader()
    if loader is not None:
        try:
            ctx = loader()
        except Exception as e:
            _safe_write({"status": "error", "error": f"@load failed: {e}"})
            sys.exit(1)

    # Signal readiness to the worker.
    _safe_write({"status": "ready"})

    # One task request per input line.
    for raw in sys.stdin:
        if _shutdown_requested:
            break

        raw = raw.strip()
        if not raw:
            # Skip blank lines silently.
            continue

        task_id = "unknown"
        try:
            request = json.loads(raw)
            task_id = request.get("task_id", "unknown")
            payload = request.get("payload", {})

            # Context-aware handlers take (payload, ctx); plain ones (payload).
            result = handler(payload) if ctx is None else handler(payload, ctx)

            response = {
                "task_id": task_id,
                "result": result,
                "error": None,
                "retry": False,
            }
        except json.JSONDecodeError as e:
            response = {
                "task_id": task_id,
                "result": None,
                "error": f"Invalid JSON input: {e}",
                "retry": False,
            }
        except RetryableError as e:
            response = {
                "task_id": task_id,
                "result": None,
                "error": str(e),
                "retry": True,
            }
        except Exception:
            response = {
                "task_id": task_id,
                "result": None,
                "error": traceback.format_exc(),
                "retry": False,
            }

        _safe_write(response)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def run_once():
    """Process exactly one task from stdin, write its response, and return.

    Suited to lightweight tasks that need not stay resident in memory.

    Flow:
      1. Protect stdout and install SIGTERM/SIGINT handlers.
      2. Run the @load function if registered.
      3. Emit {"status": "ready"}.
      4. Read a single JSON task line from stdin.
      5. Dispatch to the @task handler and write one JSON response.
      6. Return (process exits).
    """
    # Keep user print() output away from the protocol channel.
    _protect_stdout()

    # Graceful shutdown on the usual termination signals.
    for signum in (signal.SIGTERM, signal.SIGINT):
        signal.signal(signum, _shutdown_handler)

    handler = get_handler()
    if handler is None:
        raise RuntimeError("No task handler registered. Use @task decorator.")

    # Run the loader before advertising readiness.
    ctx = None
    loader = get_loader()
    if loader is not None:
        try:
            ctx = loader()
        except Exception as e:
            _safe_write({"status": "error", "error": f"@load failed: {e}"})
            sys.exit(1)

    _safe_write({"status": "ready"})

    # Exactly one task is expected on stdin.
    raw = sys.stdin.readline().strip()
    if not raw:
        # EOF or blank input: nothing to do.
        return

    task_id = "unknown"
    try:
        request = json.loads(raw)
        task_id = request.get("task_id", "unknown")
        payload = request.get("payload", {})

        # Context-aware handlers take (payload, ctx); plain ones (payload).
        result = handler(payload) if ctx is None else handler(payload, ctx)

        response = {
            "task_id": task_id,
            "result": result,
            "error": None,
            "retry": False,
        }
    except json.JSONDecodeError as e:
        response = {
            "task_id": task_id,
            "result": None,
            "error": f"Invalid JSON input: {e}",
            "retry": False,
        }
    except RetryableError as e:
        response = {
            "task_id": task_id,
            "result": None,
            "error": str(e),
            "retry": True,
        }
    except Exception:
        response = {
            "task_id": task_id,
            "result": None,
            "error": traceback.format_exc(),
            "retry": False,
        }

    _safe_write(response)
|
|
@@ -8,4 +8,6 @@ src/runqy_python/runner.py
|
|
|
8
8
|
src/runqy_python.egg-info/PKG-INFO
|
|
9
9
|
src/runqy_python.egg-info/SOURCES.txt
|
|
10
10
|
src/runqy_python.egg-info/dependency_links.txt
|
|
11
|
-
src/runqy_python.egg-info/top_level.txt
|
|
11
|
+
src/runqy_python.egg-info/top_level.txt
|
|
12
|
+
tests/test_client.py
|
|
13
|
+
tests/test_runner_hardening.py
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
"""Tests for RunqyClient."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import unittest
|
|
5
|
+
from unittest import mock
|
|
6
|
+
from urllib.error import HTTPError
|
|
7
|
+
|
|
8
|
+
from runqy_python.client import (
|
|
9
|
+
AuthenticationError,
|
|
10
|
+
BatchResult,
|
|
11
|
+
RunqyClient,
|
|
12
|
+
RunqyError,
|
|
13
|
+
TaskInfo,
|
|
14
|
+
TaskNotFoundError,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class TestRunqyClientInit(unittest.TestCase):
    """Constructor behaviour of RunqyClient."""

    def test_stores_url_and_key(self):
        # Default timeout is 30 seconds when none is supplied.
        c = RunqyClient("http://localhost:3000", api_key="my-key")
        self.assertEqual(c.server_url, "http://localhost:3000")
        self.assertEqual(c.api_key, "my-key")
        self.assertEqual(c.timeout, 30)

    def test_strips_trailing_slash(self):
        # A trailing "/" is normalized away so URL joins stay clean.
        c = RunqyClient("http://localhost:3000/", api_key="key")
        self.assertEqual(c.server_url, "http://localhost:3000")

    def test_custom_timeout(self):
        c = RunqyClient("http://localhost:3000", api_key="key", timeout=60)
        self.assertEqual(c.timeout, 60)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class TestRunqyClientEnqueue(unittest.TestCase):
|
|
35
|
+
def setUp(self):
|
|
36
|
+
self.client = RunqyClient("http://localhost:3000", api_key="test-key")
|
|
37
|
+
|
|
38
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
39
|
+
def test_enqueue_success(self, mock_urlopen):
|
|
40
|
+
response_data = json.dumps({
|
|
41
|
+
"info": {
|
|
42
|
+
"id": "task-123",
|
|
43
|
+
"queue": "inference.default",
|
|
44
|
+
"state": "pending",
|
|
45
|
+
}
|
|
46
|
+
}).encode("utf-8")
|
|
47
|
+
|
|
48
|
+
mock_response = mock.MagicMock()
|
|
49
|
+
mock_response.read.return_value = response_data
|
|
50
|
+
mock_response.__enter__ = mock.MagicMock(return_value=mock_response)
|
|
51
|
+
mock_response.__exit__ = mock.MagicMock(return_value=False)
|
|
52
|
+
mock_urlopen.return_value = mock_response
|
|
53
|
+
|
|
54
|
+
result = self.client.enqueue("inference.default", {"msg": "hello"})
|
|
55
|
+
|
|
56
|
+
self.assertIsInstance(result, TaskInfo)
|
|
57
|
+
self.assertEqual(result.task_id, "task-123")
|
|
58
|
+
self.assertEqual(result.queue, "inference.default")
|
|
59
|
+
self.assertEqual(result.state, "pending")
|
|
60
|
+
|
|
61
|
+
# Verify the request was made correctly
|
|
62
|
+
call_args = mock_urlopen.call_args
|
|
63
|
+
req = call_args[0][0]
|
|
64
|
+
self.assertTrue(req.full_url.endswith("/queue/add"))
|
|
65
|
+
self.assertEqual(req.get_header("Authorization"), "Bearer test-key")
|
|
66
|
+
self.assertEqual(req.get_header("Content-type"), "application/json")
|
|
67
|
+
|
|
68
|
+
body = json.loads(req.data.decode("utf-8"))
|
|
69
|
+
self.assertEqual(body["queue"], "inference.default")
|
|
70
|
+
self.assertEqual(body["data"], {"msg": "hello"})
|
|
71
|
+
self.assertEqual(body["timeout"], 300)
|
|
72
|
+
|
|
73
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
74
|
+
def test_enqueue_auth_error(self, mock_urlopen):
|
|
75
|
+
mock_urlopen.side_effect = HTTPError(
|
|
76
|
+
url="http://localhost:3000/queue/add",
|
|
77
|
+
code=401,
|
|
78
|
+
msg="Unauthorized",
|
|
79
|
+
hdrs=None,
|
|
80
|
+
fp=mock.MagicMock(read=mock.MagicMock(return_value=b"invalid api key")),
|
|
81
|
+
)
|
|
82
|
+
|
|
83
|
+
with self.assertRaises(AuthenticationError):
|
|
84
|
+
self.client.enqueue("inference.default", {"msg": "hello"})
|
|
85
|
+
|
|
86
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
87
|
+
def test_enqueue_server_error(self, mock_urlopen):
|
|
88
|
+
mock_urlopen.side_effect = HTTPError(
|
|
89
|
+
url="http://localhost:3000/queue/add",
|
|
90
|
+
code=500,
|
|
91
|
+
msg="Internal Server Error",
|
|
92
|
+
hdrs=None,
|
|
93
|
+
fp=mock.MagicMock(read=mock.MagicMock(return_value=b"internal error")),
|
|
94
|
+
)
|
|
95
|
+
|
|
96
|
+
with self.assertRaises(RunqyError):
|
|
97
|
+
self.client.enqueue("inference.default", {"msg": "hello"})
|
|
98
|
+
|
|
99
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
100
|
+
def test_enqueue_not_found(self, mock_urlopen):
|
|
101
|
+
mock_urlopen.side_effect = HTTPError(
|
|
102
|
+
url="http://localhost:3000/queue/add",
|
|
103
|
+
code=404,
|
|
104
|
+
msg="Not Found",
|
|
105
|
+
hdrs=None,
|
|
106
|
+
fp=mock.MagicMock(read=mock.MagicMock(return_value=b"queue not found")),
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
with self.assertRaises(TaskNotFoundError):
|
|
110
|
+
self.client.enqueue("nonexistent", {"msg": "hello"})
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
class TestRunqyClientEnqueueBatch(unittest.TestCase):
|
|
114
|
+
def setUp(self):
|
|
115
|
+
self.client = RunqyClient("http://localhost:3000", api_key="test-key")
|
|
116
|
+
|
|
117
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
118
|
+
def test_enqueue_batch_success(self, mock_urlopen):
|
|
119
|
+
response_data = json.dumps({
|
|
120
|
+
"enqueued": 2,
|
|
121
|
+
"failed": 0,
|
|
122
|
+
"task_ids": ["t1", "t2"],
|
|
123
|
+
"errors": [],
|
|
124
|
+
}).encode("utf-8")
|
|
125
|
+
|
|
126
|
+
mock_response = mock.MagicMock()
|
|
127
|
+
mock_response.read.return_value = response_data
|
|
128
|
+
mock_response.__enter__ = mock.MagicMock(return_value=mock_response)
|
|
129
|
+
mock_response.__exit__ = mock.MagicMock(return_value=False)
|
|
130
|
+
mock_urlopen.return_value = mock_response
|
|
131
|
+
|
|
132
|
+
result = self.client.enqueue_batch(
|
|
133
|
+
"inference.default",
|
|
134
|
+
[{"input": "a"}, {"input": "b"}],
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
self.assertIsInstance(result, BatchResult)
|
|
138
|
+
self.assertEqual(result.enqueued, 2)
|
|
139
|
+
self.assertEqual(result.failed, 0)
|
|
140
|
+
self.assertEqual(result.task_ids, ["t1", "t2"])
|
|
141
|
+
self.assertEqual(result.errors, [])
|
|
142
|
+
|
|
143
|
+
# Verify request body structure
|
|
144
|
+
call_args = mock_urlopen.call_args
|
|
145
|
+
req = call_args[0][0]
|
|
146
|
+
body = json.loads(req.data.decode("utf-8"))
|
|
147
|
+
self.assertEqual(body["queue"], "inference.default")
|
|
148
|
+
self.assertEqual(len(body["jobs"]), 2)
|
|
149
|
+
self.assertEqual(body["jobs"][0]["data"], {"input": "a"})
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class TestRunqyClientGetTask(unittest.TestCase):
|
|
153
|
+
def setUp(self):
|
|
154
|
+
self.client = RunqyClient("http://localhost:3000", api_key="test-key")
|
|
155
|
+
|
|
156
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
157
|
+
def test_get_task_completed(self, mock_urlopen):
|
|
158
|
+
response_data = json.dumps({
|
|
159
|
+
"info": {
|
|
160
|
+
"id": "task-456",
|
|
161
|
+
"queue": "inference.default",
|
|
162
|
+
"state": "completed",
|
|
163
|
+
"result": json.dumps({"output": "done"}),
|
|
164
|
+
"payload": json.dumps({"input": "test"}),
|
|
165
|
+
}
|
|
166
|
+
}).encode("utf-8")
|
|
167
|
+
|
|
168
|
+
mock_response = mock.MagicMock()
|
|
169
|
+
mock_response.read.return_value = response_data
|
|
170
|
+
mock_response.__enter__ = mock.MagicMock(return_value=mock_response)
|
|
171
|
+
mock_response.__exit__ = mock.MagicMock(return_value=False)
|
|
172
|
+
mock_urlopen.return_value = mock_response
|
|
173
|
+
|
|
174
|
+
result = self.client.get_task("task-456")
|
|
175
|
+
|
|
176
|
+
self.assertEqual(result.task_id, "task-456")
|
|
177
|
+
self.assertEqual(result.state, "completed")
|
|
178
|
+
self.assertEqual(result.result, {"output": "done"})
|
|
179
|
+
self.assertEqual(result.payload, {"input": "test"})
|
|
180
|
+
|
|
181
|
+
@mock.patch("runqy_python.client.urllib.request.urlopen")
|
|
182
|
+
def test_get_task_not_found(self, mock_urlopen):
|
|
183
|
+
mock_urlopen.side_effect = HTTPError(
|
|
184
|
+
url="http://localhost:3000/queue/task-999",
|
|
185
|
+
code=404,
|
|
186
|
+
msg="Not Found",
|
|
187
|
+
hdrs=None,
|
|
188
|
+
fp=mock.MagicMock(read=mock.MagicMock(return_value=b"not found")),
|
|
189
|
+
)
|
|
190
|
+
|
|
191
|
+
with self.assertRaises(TaskNotFoundError):
|
|
192
|
+
self.client.get_task("task-999")
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
class TestModuleLevelFunctions(unittest.TestCase):
    """Module-level convenience wrapper `enqueue` around RunqyClient."""

    @staticmethod
    def _fake_http_response(body_obj):
        # Context-manager mock mimicking the object urlopen() returns.
        resp = mock.MagicMock()
        resp.read.return_value = json.dumps(body_obj).encode("utf-8")
        resp.__enter__ = mock.MagicMock(return_value=resp)
        resp.__exit__ = mock.MagicMock(return_value=False)
        return resp

    @mock.patch("runqy_python.client.urllib.request.urlopen")
    def test_module_enqueue(self, mock_urlopen):
        from runqy_python.client import enqueue

        mock_urlopen.return_value = self._fake_http_response(
            {"info": {"id": "t1", "queue": "q", "state": "pending"}}
        )

        result = enqueue(
            "q", {"key": "val"},
            server_url="http://localhost:3000",
            api_key="key",
        )
        self.assertIsInstance(result, TaskInfo)
        self.assertEqual(result.task_id, "t1")
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
if __name__ == "__main__":
|
|
220
|
+
unittest.main()
|
|
@@ -0,0 +1,541 @@
|
|
|
1
|
+
"""Tests for Phase 1 runner.py hardening fixes.
|
|
2
|
+
|
|
3
|
+
Covers:
|
|
4
|
+
- _safe_write: normal output, non-serializable fallback, BrokenPipeError
|
|
5
|
+
- run(): @load failure sends {"status":"error"}, invalid JSON input handling
|
|
6
|
+
- run_once(): @load failure sends {"status":"error"}
|
|
7
|
+
|
|
8
|
+
Uses unittest + unittest.mock with io.StringIO for stdin/stdout redirection.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import io
|
|
12
|
+
import json
|
|
13
|
+
import sys
|
|
14
|
+
import unittest
|
|
15
|
+
from unittest import mock
|
|
16
|
+
|
|
17
|
+
# We need to be able to reset the decorator global state between tests,
|
|
18
|
+
# so import the decorator module directly.
|
|
19
|
+
import runqy_python.decorator as decorator
|
|
20
|
+
import runqy_python.runner as runner
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class SafeWriteTestCase(unittest.TestCase):
|
|
24
|
+
"""Tests for runner._safe_write."""
|
|
25
|
+
|
|
26
|
+
def setUp(self):
|
|
27
|
+
# Ensure _safe_write uses sys.stdout (no protocol redirect active)
|
|
28
|
+
self._saved_protocol_stdout = runner._protocol_stdout
|
|
29
|
+
runner._protocol_stdout = None
|
|
30
|
+
|
|
31
|
+
def tearDown(self):
|
|
32
|
+
runner._protocol_stdout = self._saved_protocol_stdout
|
|
33
|
+
|
|
34
|
+
def test_normal_dict_outputs_json(self):
|
|
35
|
+
"""_safe_write with a normal dict should write valid JSON + newline to stdout."""
|
|
36
|
+
fake_stdout = io.StringIO()
|
|
37
|
+
with mock.patch.object(sys, "stdout", fake_stdout):
|
|
38
|
+
runner._safe_write({"task_id": "t1", "result": {"ok": True}, "error": None, "retry": False})
|
|
39
|
+
|
|
40
|
+
output = fake_stdout.getvalue()
|
|
41
|
+
# Should end with a newline
|
|
42
|
+
self.assertTrue(output.endswith("\n"), "Output should end with a newline")
|
|
43
|
+
|
|
44
|
+
# Should be valid JSON
|
|
45
|
+
parsed = json.loads(output.strip())
|
|
46
|
+
self.assertEqual(parsed["task_id"], "t1")
|
|
47
|
+
self.assertEqual(parsed["result"], {"ok": True})
|
|
48
|
+
self.assertIsNone(parsed["error"])
|
|
49
|
+
self.assertFalse(parsed["retry"])
|
|
50
|
+
|
|
51
|
+
def test_non_serializable_data_outputs_error_fallback(self):
|
|
52
|
+
"""_safe_write with non-serializable data (e.g., a set) should output
|
|
53
|
+
a fallback error response instead of crashing."""
|
|
54
|
+
fake_stdout = io.StringIO()
|
|
55
|
+
non_serializable = {
|
|
56
|
+
"task_id": "t-bad",
|
|
57
|
+
"result": {"items": {1, 2, 3}}, # sets are not JSON-serializable
|
|
58
|
+
"error": None,
|
|
59
|
+
"retry": False,
|
|
60
|
+
}
|
|
61
|
+
with mock.patch.object(sys, "stdout", fake_stdout):
|
|
62
|
+
# Should NOT raise
|
|
63
|
+
runner._safe_write(non_serializable)
|
|
64
|
+
|
|
65
|
+
output = fake_stdout.getvalue()
|
|
66
|
+
parsed = json.loads(output.strip())
|
|
67
|
+
|
|
68
|
+
# Fallback should contain the task_id from the original data
|
|
69
|
+
self.assertEqual(parsed["task_id"], "t-bad")
|
|
70
|
+
# result should be None in the fallback
|
|
71
|
+
self.assertIsNone(parsed["result"])
|
|
72
|
+
# error should mention "not JSON-serializable"
|
|
73
|
+
self.assertIn("not JSON-serializable", parsed["error"])
|
|
74
|
+
self.assertFalse(parsed["retry"])
|
|
75
|
+
|
|
76
|
+
def test_non_serializable_without_task_id_uses_unknown(self):
|
|
77
|
+
"""_safe_write with non-serializable data and no task_id should default to 'unknown'."""
|
|
78
|
+
fake_stdout = io.StringIO()
|
|
79
|
+
# No task_id key at all
|
|
80
|
+
non_serializable = {"result": object()}
|
|
81
|
+
with mock.patch.object(sys, "stdout", fake_stdout):
|
|
82
|
+
runner._safe_write(non_serializable)
|
|
83
|
+
|
|
84
|
+
output = fake_stdout.getvalue()
|
|
85
|
+
parsed = json.loads(output.strip())
|
|
86
|
+
self.assertEqual(parsed["task_id"], "unknown")
|
|
87
|
+
self.assertIn("not JSON-serializable", parsed["error"])
|
|
88
|
+
|
|
89
|
+
def test_non_dict_non_serializable_uses_unknown(self):
|
|
90
|
+
"""_safe_write with a non-dict, non-serializable value should use 'unknown' task_id."""
|
|
91
|
+
fake_stdout = io.StringIO()
|
|
92
|
+
# Pass a non-dict that is also not serializable
|
|
93
|
+
with mock.patch.object(sys, "stdout", fake_stdout):
|
|
94
|
+
runner._safe_write(object())
|
|
95
|
+
|
|
96
|
+
output = fake_stdout.getvalue()
|
|
97
|
+
parsed = json.loads(output.strip())
|
|
98
|
+
self.assertEqual(parsed["task_id"], "unknown")
|
|
99
|
+
self.assertIn("not JSON-serializable", parsed["error"])
|
|
100
|
+
|
|
101
|
+
def test_broken_pipe_exits_cleanly(self):
|
|
102
|
+
"""_safe_write should call sys.exit(1) on BrokenPipeError."""
|
|
103
|
+
broken_stdout = mock.MagicMock()
|
|
104
|
+
broken_stdout.write.side_effect = BrokenPipeError("pipe closed")
|
|
105
|
+
with mock.patch.object(sys, "stdout", broken_stdout):
|
|
106
|
+
with self.assertRaises(SystemExit) as ctx:
|
|
107
|
+
runner._safe_write({"status": "ready"})
|
|
108
|
+
self.assertEqual(ctx.exception.code, 1)
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class RunLoadFailureTestCase(unittest.TestCase):
|
|
112
|
+
"""Tests for run() handling a failing @load function."""
|
|
113
|
+
|
|
114
|
+
def setUp(self):
|
|
115
|
+
# Reset global decorator state before each test
|
|
116
|
+
decorator._reset()
|
|
117
|
+
# Reset shutdown flag and protocol stdout
|
|
118
|
+
runner._shutdown_requested = False
|
|
119
|
+
runner._protocol_stdout = None
|
|
120
|
+
|
|
121
|
+
def tearDown(self):
|
|
122
|
+
decorator._reset()
|
|
123
|
+
runner._shutdown_requested = False
|
|
124
|
+
runner._protocol_stdout = None
|
|
125
|
+
|
|
126
|
+
def test_run_load_failure_sends_error_status(self):
|
|
127
|
+
"""run() should send {"status":"error"} and exit(1) when @load raises."""
|
|
128
|
+
|
|
129
|
+
@decorator.task
|
|
130
|
+
def my_handler(payload):
|
|
131
|
+
return {"done": True}
|
|
132
|
+
|
|
133
|
+
@decorator.load
|
|
134
|
+
def my_loader():
|
|
135
|
+
raise RuntimeError("model download failed")
|
|
136
|
+
|
|
137
|
+
fake_stdout = io.StringIO()
|
|
138
|
+
|
|
139
|
+
with mock.patch.object(sys, "stdout", fake_stdout), \
|
|
140
|
+
mock.patch("runqy_python.runner._protect_stdout"), \
|
|
141
|
+
mock.patch("runqy_python.runner.signal.signal"):
|
|
142
|
+
with self.assertRaises(SystemExit) as ctx:
|
|
143
|
+
runner.run()
|
|
144
|
+
|
|
145
|
+
self.assertEqual(ctx.exception.code, 1)
|
|
146
|
+
|
|
147
|
+
# Parse the output line
|
|
148
|
+
output = fake_stdout.getvalue().strip()
|
|
149
|
+
parsed = json.loads(output)
|
|
150
|
+
|
|
151
|
+
self.assertEqual(parsed["status"], "error")
|
|
152
|
+
self.assertIn("@load failed", parsed["error"])
|
|
153
|
+
self.assertIn("model download failed", parsed["error"])
|
|
154
|
+
|
|
155
|
+
def test_run_without_loader_sends_ready(self):
|
|
156
|
+
"""run() with no @load should send {"status":"ready"} and proceed normally."""
|
|
157
|
+
|
|
158
|
+
@decorator.task
|
|
159
|
+
def my_handler(payload):
|
|
160
|
+
return {"echo": payload}
|
|
161
|
+
|
|
162
|
+
task_input = json.dumps({"task_id": "t1", "payload": {"msg": "hello"}}) + "\n"
|
|
163
|
+
fake_stdin = io.StringIO(task_input)
|
|
164
|
+
fake_stdout = io.StringIO()
|
|
165
|
+
|
|
166
|
+
with mock.patch.object(sys, "stdin", fake_stdin), \
|
|
167
|
+
mock.patch.object(sys, "stdout", fake_stdout), \
|
|
168
|
+
mock.patch("runqy_python.runner._protect_stdout"), \
|
|
169
|
+
mock.patch("runqy_python.runner.signal.signal"):
|
|
170
|
+
runner.run()
|
|
171
|
+
|
|
172
|
+
lines = fake_stdout.getvalue().strip().split("\n")
|
|
173
|
+
# First line: ready signal
|
|
174
|
+
ready = json.loads(lines[0])
|
|
175
|
+
self.assertEqual(ready["status"], "ready")
|
|
176
|
+
|
|
177
|
+
# Second line: task response
|
|
178
|
+
resp = json.loads(lines[1])
|
|
179
|
+
self.assertEqual(resp["task_id"], "t1")
|
|
180
|
+
self.assertEqual(resp["result"], {"echo": {"msg": "hello"}})
|
|
181
|
+
self.assertIsNone(resp["error"])
|
|
182
|
+
|
|
183
|
+
def test_run_no_handler_raises(self):
|
|
184
|
+
"""run() should raise RuntimeError if no @task handler is registered."""
|
|
185
|
+
with mock.patch("runqy_python.runner._protect_stdout"), \
|
|
186
|
+
mock.patch("runqy_python.runner.signal.signal"):
|
|
187
|
+
with self.assertRaises(RuntimeError) as ctx:
|
|
188
|
+
runner.run()
|
|
189
|
+
self.assertIn("No task handler registered", str(ctx.exception))
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
class RunInvalidJsonTestCase(unittest.TestCase):
    """Tests for run() handling invalid JSON input.

    Each test patches stdin/stdout with StringIO and neuters the runner's
    stdout protection and signal registration so run() can execute inside
    the test process without touching real process state.
    """

    def setUp(self):
        # Clear module-level registries/flags so tests don't leak state.
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def tearDown(self):
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def test_invalid_json_sends_error_response(self):
        """run() should send an error response with 'Invalid JSON input' for malformed input."""

        @decorator.task
        def my_handler(payload):
            return {"done": True}

        # First line is invalid JSON, no more lines after
        fake_stdin = io.StringIO("this is not json\n")
        fake_stdout = io.StringIO()

        # _protect_stdout and signal.signal are patched out: we drive the
        # protocol through plain StringIO objects instead.
        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run()

        lines = fake_stdout.getvalue().strip().split("\n")
        # First line: ready signal
        ready = json.loads(lines[0])
        self.assertEqual(ready["status"], "ready")

        # Second line: error response for invalid JSON
        resp = json.loads(lines[1])
        self.assertEqual(resp["task_id"], "unknown")
        self.assertIsNone(resp["result"])
        self.assertIn("Invalid JSON input", resp["error"])
        self.assertFalse(resp["retry"])

    def test_invalid_json_does_not_crash_and_continues(self):
        """run() should handle invalid JSON and then continue processing valid tasks."""

        @decorator.task
        def my_handler(payload):
            return {"value": payload.get("x", 0) * 2}

        # First line is invalid, second is valid
        lines_in = "NOT_JSON\n" + json.dumps({"task_id": "t2", "payload": {"x": 5}}) + "\n"
        fake_stdin = io.StringIO(lines_in)
        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run()

        lines = fake_stdout.getvalue().strip().split("\n")
        self.assertEqual(len(lines), 3)  # ready + error + success

        # Line 0: ready
        self.assertEqual(json.loads(lines[0])["status"], "ready")
        # Line 1: error for invalid JSON
        self.assertIn("Invalid JSON input", json.loads(lines[1])["error"])
        # Line 2: successful task response
        resp = json.loads(lines[2])
        self.assertEqual(resp["task_id"], "t2")
        self.assertEqual(resp["result"], {"value": 10})
        self.assertIsNone(resp["error"])

    def test_empty_lines_are_skipped(self):
        """run() should skip empty lines without producing output."""

        @decorator.task
        def my_handler(payload):
            return {"ok": True}

        # Only empty/whitespace lines, then a valid task
        lines_in = "\n \n" + json.dumps({"task_id": "t3", "payload": {}}) + "\n"
        fake_stdin = io.StringIO(lines_in)
        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run()

        lines = fake_stdout.getvalue().strip().split("\n")
        # Should only have ready + one task response (empty lines skipped)
        self.assertEqual(len(lines), 2)
        self.assertEqual(json.loads(lines[0])["status"], "ready")
        self.assertEqual(json.loads(lines[1])["task_id"], "t3")
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
class RunOnceLoadFailureTestCase(unittest.TestCase):
    """Tests for run_once() handling a failing @load function.

    Also covers run_once()'s single-task happy path, invalid JSON, missing
    handler, and empty stdin. stdin/stdout are replaced with StringIO and
    the runner's stdout protection / signal registration are patched out.
    """

    def setUp(self):
        # Clear module-level registries/flags so tests don't leak state.
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def tearDown(self):
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def test_run_once_load_failure_sends_error_status(self):
        """run_once() should send {"status":"error"} and exit(1) when @load raises."""

        @decorator.task
        def my_handler(payload):
            return {"done": True}

        @decorator.load
        def my_loader():
            raise ValueError("bad config")

        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            with self.assertRaises(SystemExit) as ctx:
                runner.run_once()

        # Loader failure must be reported as a non-zero exit.
        self.assertEqual(ctx.exception.code, 1)

        output = fake_stdout.getvalue().strip()
        parsed = json.loads(output)

        # The error status line must carry both the phase and the cause.
        self.assertEqual(parsed["status"], "error")
        self.assertIn("@load failed", parsed["error"])
        self.assertIn("bad config", parsed["error"])

    def test_run_once_processes_single_task(self):
        """run_once() should process exactly one task and return."""

        @decorator.task
        def my_handler(payload):
            return {"doubled": payload.get("n", 0) * 2}

        task_input = json.dumps({"task_id": "once-1", "payload": {"n": 7}}) + "\n"
        fake_stdin = io.StringIO(task_input)
        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run_once()

        lines = fake_stdout.getvalue().strip().split("\n")
        self.assertEqual(len(lines), 2)  # ready + response

        ready = json.loads(lines[0])
        self.assertEqual(ready["status"], "ready")

        resp = json.loads(lines[1])
        self.assertEqual(resp["task_id"], "once-1")
        self.assertEqual(resp["result"], {"doubled": 14})
        self.assertIsNone(resp["error"])

    def test_run_once_invalid_json_sends_error(self):
        """run_once() should handle invalid JSON input gracefully."""

        @decorator.task
        def my_handler(payload):
            return {"ok": True}

        fake_stdin = io.StringIO("{broken json\n")
        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run_once()

        lines = fake_stdout.getvalue().strip().split("\n")
        self.assertEqual(len(lines), 2)  # ready + error

        ready = json.loads(lines[0])
        self.assertEqual(ready["status"], "ready")

        resp = json.loads(lines[1])
        self.assertEqual(resp["task_id"], "unknown")
        self.assertIn("Invalid JSON input", resp["error"])
        self.assertFalse(resp["retry"])

    def test_run_once_no_handler_raises(self):
        """run_once() should raise RuntimeError if no @task handler is registered."""
        with mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            with self.assertRaises(RuntimeError) as ctx:
                runner.run_once()
        self.assertIn("No task handler registered", str(ctx.exception))

    def test_run_once_empty_input_returns_without_error(self):
        """run_once() should return cleanly when stdin is empty (no task to process)."""

        @decorator.task
        def my_handler(payload):
            return {"ok": True}

        fake_stdin = io.StringIO("")
        fake_stdout = io.StringIO()

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stdout), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            # Should not raise
            runner.run_once()

        lines = fake_stdout.getvalue().strip().split("\n")
        # Only the ready signal, no task response
        self.assertEqual(len(lines), 1)
        self.assertEqual(json.loads(lines[0])["status"], "ready")
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
class ShutdownHandlerTestCase(unittest.TestCase):
    """Behaviour of runner._shutdown_handler across repeated signals."""

    def setUp(self):
        # Start every test from a clean "no shutdown pending" state.
        runner._shutdown_requested = False

    def tearDown(self):
        runner._shutdown_requested = False

    def test_shutdown_handler_sets_flag_on_first_signal(self):
        """The first signal only flags the shutdown; it must not exit."""
        runner._shutdown_handler(15, None)  # 15 == SIGTERM
        self.assertTrue(runner._shutdown_requested)

    def test_shutdown_handler_exits_on_second_signal(self):
        """A repeated signal while already shutting down forces exit(1)."""
        # Pretend the first SIGTERM was already delivered.
        runner._shutdown_requested = True
        with self.assertRaises(SystemExit) as raised:
            runner._shutdown_handler(15, None)  # second SIGTERM
        self.assertEqual(raised.exception.code, 1)
|
|
437
|
+
|
|
438
|
+
|
|
439
|
+
class StdoutProtectionTestCase(unittest.TestCase):
    """Tests for stdout protection (print() shouldn't corrupt protocol)."""

    def setUp(self):
        # Clear module-level registries/flags so tests don't leak state.
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def tearDown(self):
        decorator._reset()
        runner._shutdown_requested = False
        runner._protocol_stdout = None

    def test_print_in_handler_does_not_appear_in_protocol(self):
        """print() inside a @task handler should not corrupt JSON protocol output."""

        @decorator.task
        def my_handler(payload):
            print("debug: processing task")  # This should go to stderr, not protocol
            return {"ok": True}

        task_input = json.dumps({"task_id": "t-print", "payload": {}}) + "\n"
        fake_stdin = io.StringIO(task_input)
        fake_protocol = io.StringIO()  # stand-in for the protected protocol stream
        fake_stderr = io.StringIO()    # stand-in for where user print() should land

        # Simulate what _protect_stdout does: set _protocol_stdout and redirect stdout to stderr
        # (_protect_stdout itself is patched out below, so we wire the streams manually).
        runner._protocol_stdout = fake_protocol

        with mock.patch.object(sys, "stdin", fake_stdin), \
             mock.patch.object(sys, "stdout", fake_stderr), \
             mock.patch("runqy_python.runner._protect_stdout"), \
             mock.patch("runqy_python.runner.signal.signal"):
            runner.run()

        # Protocol output should be clean JSON only
        protocol_lines = fake_protocol.getvalue().strip().split("\n")
        self.assertEqual(len(protocol_lines), 2)  # ready + response
        ready = json.loads(protocol_lines[0])
        self.assertEqual(ready["status"], "ready")
        resp = json.loads(protocol_lines[1])
        self.assertEqual(resp["task_id"], "t-print")
        self.assertEqual(resp["result"], {"ok": True})

        # print() output should have gone to "stderr" (which is sys.stdout in this test)
        self.assertIn("debug: processing task", fake_stderr.getvalue())
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
class DecoratorOverwriteTestCase(unittest.TestCase):
    """@task and @load must refuse a second registration."""

    def setUp(self):
        decorator._reset()

    def tearDown(self):
        decorator._reset()

    def test_task_double_registration_raises(self):
        """A second @task registration raises RuntimeError naming the first handler."""
        # Register the first handler via an explicit decorator call.
        def handler_one(payload):
            return {}

        handler_one = decorator.task(handler_one)

        def handler_two(payload):
            return {}

        with self.assertRaises(RuntimeError) as raised:
            decorator.task(handler_two)

        message = str(raised.exception)
        self.assertIn("already registered", message)
        self.assertIn("handler_one", message)

    def test_load_double_registration_raises(self):
        """A second @load registration raises RuntimeError naming the first loader."""
        def loader_one():
            return {}

        loader_one = decorator.load(loader_one)

        def loader_two():
            return {}

        with self.assertRaises(RuntimeError) as raised:
            decorator.load(loader_two)

        message = str(raised.exception)
        self.assertIn("already registered", message)
        self.assertIn("loader_one", message)

    def test_reset_allows_re_registration(self):
        """_reset() clears the registry so a new handler can be registered."""
        def handler_one(payload):
            return {}

        handler_one = decorator.task(handler_one)

        decorator._reset()

        def handler_two(payload):
            return {"new": True}

        # Must not raise after _reset().
        handler_two = decorator.task(handler_two)

        self.assertEqual(decorator.get_handler(), handler_two)
|
|
538
|
+
|
|
539
|
+
|
|
540
|
+
# Allow running this test module directly (python tests/test_runner_hardening.py).
if __name__ == "__main__":
    unittest.main()
|
|
@@ -1,127 +0,0 @@
|
|
|
1
|
-
"""Runner loop for processing tasks from runqy-worker."""
|
|
2
|
-
|
|
3
|
-
import sys
|
|
4
|
-
import json
|
|
5
|
-
from .decorator import get_handler, get_loader
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
def run():
    """Main loop: load, ready signal, read tasks, call handler, write responses.

    This function:
    1. Calls the @load function if registered (for model loading, etc.)
    2. Sends {"status": "ready"} to signal readiness to runqy-worker
    3. Reads JSON task requests from stdin (one per line)
    4. Calls the registered @task handler with the payload (and context if @load was used)
    5. Writes one JSON response per task to stdout

    Raises:
        RuntimeError: if no handler was registered with @task.
    """
    handler = get_handler()
    if handler is None:
        raise RuntimeError("No task handler registered. Use @task decorator.")

    # Run load function if registered (before ready signal) so the worker
    # only starts dispatching tasks after expensive setup has completed.
    loader = get_loader()
    ctx = loader() if loader is not None else None

    def _process(line):
        """Parse one protocol line, run the handler, and build the response dict.

        Never raises: any failure (bad JSON, non-dict payload envelope, or a
        handler exception) is converted into an error response so one bad
        task cannot kill the loop.
        """
        task_id = "unknown"
        try:
            task_data = json.loads(line)
            # task_id is extracted first so a later failure still reports it.
            task_id = task_data.get("task_id", "unknown")
            payload = task_data.get("payload", {})

            # Call handler with or without context, matching the @load contract.
            if ctx is not None:
                result = handler(payload, ctx)
            else:
                result = handler(payload)

            return {
                "task_id": task_id,
                "result": result,
                "error": None,
                "retry": False
            }
        except Exception as e:
            return {
                "task_id": task_id,
                "result": None,
                "error": str(e),
                "retry": False
            }

    # Ready signal
    print(json.dumps({"status": "ready"}))
    sys.stdout.flush()

    # Process tasks from stdin, one JSON object per line; blank lines are skipped.
    for line in sys.stdin:
        line = line.strip()
        if not line:
            continue
        print(json.dumps(_process(line)))
        sys.stdout.flush()  # flush per task so the worker sees responses promptly
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
def run_once():
    """Process a single task from stdin and exit.

    Use this for lightweight tasks that don't need to stay loaded in memory.

    Flow:
    1. Calls @load function if registered
    2. Sends {"status": "ready"}
    3. Reads ONE JSON task from stdin
    4. Calls @task handler
    5. Writes response to stdout
    6. Exits

    Raises:
        RuntimeError: if no handler was registered with @task.
    """
    handler = get_handler()
    if handler is None:
        raise RuntimeError("No task handler registered. Use @task decorator.")

    # The @load function (if any) runs before the ready signal so the
    # worker never dispatches into a half-initialized process.
    loader = get_loader()
    ctx = None
    if loader is not None:
        ctx = loader()

    # Announce readiness to the worker.
    print(json.dumps({"status": "ready"}))
    sys.stdout.flush()

    # Exactly one task line is consumed; nothing to do on empty input.
    raw = sys.stdin.readline().strip()
    if not raw:
        return

    task_id = "unknown"
    try:
        request = json.loads(raw)
        task_id = request.get("task_id", "unknown")
        payload = request.get("payload", {})

        # Dispatch with context only when @load produced one.
        if ctx is None:
            result = handler(payload)
        else:
            result = handler(payload, ctx)

        response = {
            "task_id": task_id,
            "result": result,
            "error": None,
            "retry": False
        }
    except Exception as exc:
        # Any failure becomes an error response rather than a crash.
        response = {
            "task_id": task_id,
            "result": None,
            "error": str(exc),
            "retry": False
        }

    print(json.dumps(response))
    sys.stdout.flush()
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|