xllify-0.8.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- xllify/XLLIFY_DIST_VERSION +2 -0
- xllify/__init__.py +101 -0
- xllify/__main__.py +343 -0
- xllify/diagnostics.py +78 -0
- xllify/funcinfo.py +375 -0
- xllify/install.py +251 -0
- xllify/py.typed +1 -0
- xllify/rpc_server.py +639 -0
- xllify/rtd_client.py +576 -0
- xllify-0.8.9.dist-info/METADATA +407 -0
- xllify-0.8.9.dist-info/RECORD +15 -0
- xllify-0.8.9.dist-info/WHEEL +5 -0
- xllify-0.8.9.dist-info/entry_points.txt +5 -0
- xllify-0.8.9.dist-info/licenses/LICENSE +21 -0
- xllify-0.8.9.dist-info/top_level.txt +1 -0
xllify/rpc_server.py
ADDED
@@ -0,0 +1,639 @@
"""
Python RPC Server for xllify

Receives function call requests from Excel/xllify via ZeroMQ ROUTER socket.
"""

import json
import logging
import os
import sys
import traceback
import inspect
import signal
from pathlib import Path
from typing import Callable, Dict, Any, List, Optional
from dataclasses import dataclass

from xllify.rtd_client import RTDClient
from xllify.funcinfo import extract_functions

logger = logging.getLogger(__name__)

try:
    import zmq
except ImportError:
    raise ImportError("This module requires pyzmq.")


@dataclass
class Parameter:
    """
    Parameter metadata for Excel functions.

    Attributes:
        name: Parameter name (must match function argument name)
        type: Parameter type (e.g., "number", "string", "boolean", "array")
        description: Description of what the parameter does
        optional: Whether the parameter is optional (has a default value)
        default: Default value for optional parameters
    """

    name: str
    type: str = "any"
    description: str = ""
    optional: bool = False
    default: Any = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization"""
        result = {"name": self.name, "type": self.type, "optional": self.optional}
        if self.description:
            result["description"] = self.description
        if self.default is not None:
            result["default"] = self.default
        return result


class XllifyRPCServer:
    """
    RPC server that receives function calls from xllify and executes registered Python functions.

    Example usage:
        server = XllifyRPCServer()

        @server.fn("MyFunc")
        def my_func(name: str, count: int):
            return f"Hello {name} x {count}"

        server.start()  # Blocks, listening for requests
    """

    # ZeroMQ endpoint (TCP on Python since ipc:// not supported)
    # This connects to the C++ broker's backend DEALER socket
    DEFAULT_ENDPOINT = "tcp://127.0.0.1:55558"

    def __init__(self, endpoint: Optional[str] = None, xll_name: Optional[str] = None):
        """
        Initialize RPC server.

        Args:
            endpoint: Custom ZeroMQ endpoint (overrides default)
            xll_name: XLL name for metadata path (default: "xllify_addin")
        """
        # All workers connect to the same backend endpoint
        self.endpoint = endpoint if endpoint else self.DEFAULT_ENDPOINT
        self.xll_name = xll_name if xll_name else "xllify_addin"
        self.functions: Dict[str, Callable] = {}

        # Store batching config (defaults)
        self._batch_config = {"enable_batching": True, "batch_size": 500, "batch_timeout_ms": 50}

        # RTD client will be initialized with these settings
        # Enable batching for RPC server for better performance
        # batch_size=500, batch_timeout_ms=50 (50ms wait allows more batching)
        self.rtd_client = RTDClient(
            enable_batching=self._batch_config["enable_batching"],
            batch_size=self._batch_config["batch_size"],
            batch_timeout_ms=self._batch_config["batch_timeout_ms"],
        )
        self.running = False
        self.metadata_file: Optional[Path] = None

        # ZeroMQ context and socket
        self._context: Optional[zmq.Context] = None
        self._router: Optional[zmq.Socket] = None

    def configure_batching(
        self, enabled: bool = True, batch_size: int = 500, batch_timeout_ms: int = 50
    ) -> None:
        """
        Configure batching behavior for RTD updates.

        Call this before starting the server to customize batching settings.
        Batching improves performance by sending multiple updates together.

        Args:
            enabled: Enable batching (default: True)
            batch_size: Maximum number of updates to batch together (default: 500)
            batch_timeout_ms: Maximum time to wait before flushing batch in milliseconds (default: 50)

        Example:
            server = xllify.get_server()
            server.configure_batching(batch_size=1000, batch_timeout_ms=100)

            @xllify.fn("xllipy.Hello")
            def hello(name: str) -> str:
                return f"Hello, {name}!"
        """
        if self.running:
            logger.warning("Cannot configure batching while server is running")
            return

        self._batch_config = {
            "enable_batching": enabled,
            "batch_size": batch_size,
            "batch_timeout_ms": batch_timeout_ms,
        }

        # Recreate RTD client with new settings
        self.rtd_client = RTDClient(
            enable_batching=enabled, batch_size=batch_size, batch_timeout_ms=batch_timeout_ms
        )

        logger.info(
            f"Batching configured: enabled={enabled}, batch_size={batch_size}, timeout={batch_timeout_ms}ms"
        )

    def fn(
        self,
        name: str,
        description: str = "",
        category: str = "",
        parameters: Optional[List[Parameter]] = None,
        return_type: str = "",
    ):
        """
        Decorator to register a Python function as an Excel function.

        Args:
            name: The Excel function name (e.g., "PYTHON.MyFunc" or "xllipy.MyFunc")
            description: Optional description of what the function does (defaults to function's docstring)
            category: Optional category for grouping functions in Excel
            parameters: Optional list of Parameter objects describing each parameter
            return_type: Optional return type override (defaults to type annotation if present)

        Example:
            @server.fn("PYTHON.Add", category="Math", return_type="number", parameters=[
                Parameter("a", type="number", description="First number"),
                Parameter("b", type="number", description="Second number")
            ])
            def add(a, b):
                \"\"\"Add two numbers\"\"\"
                return a + b

        Raises:
            ValueError: If parameter names don't match function signature
        """

        def decorator(func: Callable):
            # Validate parameters against function signature
            if parameters:
                self._validate_parameters(func, parameters, name)

            self.functions[name] = func

            # Store metadata as function attributes for introspection
            # Use docstring if description not provided
            func._xllify_name = name
            func._xllify_description = description or (func.__doc__.strip() if func.__doc__ else "")
            func._xllify_category = category
            func._xllify_parameters = parameters or []
            func._xllify_return_type = return_type
            return func

        return decorator

    def _validate_parameters(self, func: Callable, parameters: List[Parameter], func_name: str):
        """
        Validate that parameter names match the function signature.

        Args:
            func: The function being decorated
            parameters: List of Parameter objects
            func_name: The Excel function name (for error messages)

        Raises:
            ValueError: If validation fails
        """
        # Get function signature
        sig = inspect.signature(func)
        func_params = list(sig.parameters.keys())

        param_names = [p.name for p in parameters]

        for param_name in param_names:
            if param_name not in func_params:
                raise ValueError(
                    f"Parameter '{param_name}' in @xllify.fn('{func_name}') "
                    f"does not match function signature. "
                    f"Function has parameters: {func_params}"
                )

        # Warn if function has more parameters than declared (not an error, just incomplete metadata)
        if len(func_params) > len(param_names):
            missing = [p for p in func_params if p not in param_names]
            logger.warning(f"Function '{func_name}' has undocumented parameters: {missing}")

    def _parse_type_prefixed_arg(self, frames: List[str], index: int) -> tuple[Any, int]:
        """
        Parse a type-prefixed argument from frames.

        Args:
            frames: List of frame strings
            index: Current frame index to start parsing

        Returns:
            Tuple of (parsed_value, next_index)
        """
        if index >= len(frames):
            raise ValueError("Unexpected end of frames")

        frame = frames[index]

        if not frame or len(frame) < 2 or frame[1] != ":":
            raise ValueError(f"Invalid type-prefixed frame: {frame}")

        type_prefix = frame[0]
        value_part = frame[2:]  # Everything after "X:"

        if type_prefix == "i":  # Integer
            return (int(value_part), index + 1)

        elif type_prefix == "d":  # Double/number
            return (float(value_part), index + 1)

        elif type_prefix == "b":  # Boolean
            return (value_part == "true", index + 1)

        elif type_prefix == "s":  # String
            return (value_part, index + 1)

        elif type_prefix == "n":  # Null/None
            return (None, index + 1)

        elif type_prefix == "m":  # Matrix (newline-delimited format)
            # Parse dimensions: "m:rows,cols\nd:1\nd:2\n..."
            # Split by newlines to get cells
            lines = frame.split("\n")

            if len(lines) < 1:
                raise ValueError(f"Invalid matrix format: missing dimensions")

            # First line is "m:rows,cols", extract dimensions from value_part of first line only
            first_line = lines[0]
            # Extract just the dimension part (after "m:")
            dim_part = first_line[2:]  # Skip "m:"
            dims = dim_part.split(",")
            if len(dims) != 2:
                raise ValueError(f"Invalid matrix dimensions: {dim_part}")

            rows = int(dims[0])
            cols = int(dims[1])
            total_cells = rows * cols

            # Verify we have enough cell lines
            if len(lines) - 1 != total_cells:
                raise ValueError(
                    f"Matrix cell count mismatch: expected {total_cells}, got {len(lines) - 1}"
                )

            # Parse each cell from subsequent lines
            matrix = []
            line_idx = 1  # Start after dimension line

            for row in range(rows):
                row_data = []
                for col in range(cols):
                    if line_idx >= len(lines):
                        raise ValueError(f"Matrix underflow: expected {total_cells} cells")

                    cell_line = lines[line_idx]
                    line_idx += 1

                    # Parse cell value
                    if not cell_line or len(cell_line) < 2 or cell_line[1] != ":":
                        raise ValueError(f"Invalid cell format: {cell_line}")

                    cell_type = cell_line[0]
                    cell_value_part = cell_line[2:]

                    if cell_type == "i":
                        row_data.append(int(cell_value_part))
                    elif cell_type == "d":
                        row_data.append(float(cell_value_part))
                    elif cell_type == "b":
                        row_data.append(cell_value_part == "true")
                    elif cell_type == "s":
                        row_data.append(cell_value_part)
                    elif cell_type == "n":
                        row_data.append(None)
                    elif cell_type == "e":
                        row_data.append(f"#ERROR: {cell_value_part}")
                    else:
                        raise ValueError(f"Unknown cell type prefix: {cell_type}")

                matrix.append(row_data)

            return (matrix, index + 1)

        elif type_prefix == "e":  # Error
            return (f"#ERROR: {value_part}", index + 1)

        else:
            raise ValueError(f"Unknown type prefix: {type_prefix}")

    def _handle_zmq_request(self, frames: List[bytes]) -> None:
        """
        Handle ZeroMQ RPC request (type-prefixed wire format).

        Args:
            frames: List of ZeroMQ message frames
                frames[0] = "x:function:topic:arg_count"
                frames[1..N] = type-prefixed arguments
        """
        try:
            logger.debug(f"Received {len(frames)} frames")
            for i, frame in enumerate(frames):
                # Check if frame looks like binary (identity) or text
                is_binary = len(frame) > 0 and (frame[0] < 32 or frame[0] > 126)
                logger.debug(f"Frame {i} (len={len(frame)}, binary={is_binary}): {frame[:50]}")

            if len(frames) < 1:
                logger.error("RPC Error: Not enough frames")
                return

            # Check if first frame is an identity frame (binary, typically small)
            # When DEALER connects through ROUTER-DEALER proxy, we might get an identity frame
            frame_offset = 0
            if len(frames[0]) > 0 and frames[0][0] < 32:
                # First frame looks like binary identity, skip it
                logger.debug(f"Skipping identity frame: {frames[0][:20]}")
                frame_offset = 1

            if frame_offset >= len(frames):
                logger.error(
                    f"RPC Error: No message frames after identity (total frames: {len(frames)})"
                )
                return

            # Get the actual header frame
            try:
                header = frames[frame_offset].decode("utf-8")
            except UnicodeDecodeError as e:
                logger.error(f"RPC Error: Failed to decode header frame as UTF-8: {e}")
                logger.error(f"Raw header bytes: {frames[frame_offset][:100]}")
                return

            # Parse header: "CALL|function|topic|arg_count"
            # Using pipe delimiter to avoid conflicts with colons in topics (e.g., "43f4d02a:xllipy.Hello#hash")
            if not header.startswith("CALL|"):
                logger.error(f"RPC Error: Invalid header format: {header}")
                return

            # Strip "CALL|" prefix and split by pipe
            header_body = header[5:]  # Strip "CALL|"
            parts = header_body.split("|")

            logger.debug(f"Header: {header}")
            logger.debug(f"Header body: {header_body}")
            logger.debug(f"Parts after split: {parts}")

            if len(parts) != 3:
                logger.error(f"RPC Error: Invalid header parts count={len(parts)}, header={header}")
                return

            function, topic, arg_count_str = parts
            logger.debug(f"Parsed: function={function}, topic={topic}, arg_count={arg_count_str}")
            arg_count = int(arg_count_str)

            # Decode remaining frames to strings (skip identity frame if present)
            frame_strs = [f.decode("utf-8") for f in frames[frame_offset + 1 :]]

            # Parse arguments
            args = []
            frame_index = 0
            for _ in range(arg_count):
                arg_value, frame_index = self._parse_type_prefixed_arg(frame_strs, frame_index)
                args.append(arg_value)

            logger.debug(f"RPC: {function}({topic[:50]}...) with {len(args)} args")

            # Look up the registered function
            func = self.functions.get(function)
            if not func:
                error_msg = f"Function '{function}' not registered"
                logger.error(f"RPC Error: {error_msg}")
                self.rtd_client.complete(topic, f"#ERROR: {error_msg}")
                return

            # Execute the function
            result = func(*args)

            # Send result back via RTD
            self.rtd_client.complete(topic, result)

        except Exception as e:
            logger.error(f"RPC Error handling request: {e}")
            logger.debug(traceback.format_exc())

            # Try to extract topic from header for error reporting
            try:
                if len(frames) >= 1:
                    header = frames[0].decode("utf-8")
                    parts = header[5:].split("|")  # Split by pipe, strip "CALL|"
                    if len(parts) >= 2:
                        topic = parts[1]  # Second part is topic
                        self.rtd_client.complete(topic, f"#ERROR: {str(e)}")
            except:
                pass

    def generate_function_metadata_json(self) -> str:
        """
        Generate function metadata JSON for all registered functions.
        Format matches the pyfuncinfo tool output.

        Returns:
            JSON string with function metadata
        """
        functions_metadata = []

        for func_name, func in self.functions.items():
            # Extract stored metadata
            description = getattr(func, "_xllify_description", "")
            category = getattr(func, "_xllify_category", "")
            parameters = getattr(func, "_xllify_parameters", [])
            return_type = getattr(func, "_xllify_return_type", "")

            # Get function signature for has_vararg detection
            sig = inspect.signature(func)
            has_vararg = any(
                p.kind == inspect.Parameter.VAR_POSITIONAL for p in sig.parameters.values()
            )
            has_kwargs = any(
                p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()
            )

            # Build parameter list
            param_list = [param.to_dict() for param in parameters]

            # Build function metadata
            func_metadata = {
                "config_name": func_name,
                "description": description,
                "category": category,
                "execution_type": "external",
                "parameters": param_list,
                "has_vararg": has_vararg,
            }

            if has_kwargs:
                func_metadata["has_kwargs"] = has_kwargs

            if return_type:
                func_metadata["return_type"] = return_type

            functions_metadata.append(func_metadata)

        # Build runtime command using current Python executable
        # This ensures we use the same venv/python that the user is running
        python_exe = sys.executable

        # Get the module name from sys.argv if available, otherwise use 'main'
        entrypoint = "main.py"
        if len(sys.argv) > 0 and sys.argv[0].endswith(".py"):
            # Extract module name from script path
            entrypoint = sys.argv[0]

        runtime_command = f'"{python_exe}" -m xllify {entrypoint} --xll-name both'
        working_dir = str(Path.cwd())

        # Read spawn_count from global set by configure_spawn_count()
        import xllify
        spawn_count = getattr(xllify, '_spawn_count', 1)

        # Return JSON with generic runtime config
        return json.dumps(
            {
                "runtime": {
                    "command": runtime_command,
                    "working_directory": working_dir,
                    "spawn_count": spawn_count,
                },
                "functions": functions_metadata,
            },
            indent=2,
        )

    def _write_function_metadata(self):
        r"""
        Write function metadata JSON to AppData\Local\xllify\{xll_name}\xrpc\python_funcs.json
        """
        try:
            # Build output path: AppData\Local\xllify\{xll_name}\xrpc\python_funcs.json
            appdata = os.getenv("LOCALAPPDATA")
            if not appdata:
                logger.warning("LOCALAPPDATA not found, skipping metadata write")
                return

            output_dir = Path(appdata) / "xllify" / self.xll_name / "xrpc"

            # Create directory if it doesn't exist
            if not output_dir.exists():
                logger.debug(f"Creating metadata directory: {output_dir}")
                output_dir.mkdir(parents=True, exist_ok=True)

            output_file = output_dir / "python_funcs.json"

            # Generate and write metadata
            metadata_json = self.generate_function_metadata_json()
            output_file.write_text(metadata_json, encoding="utf-8")

            # Store the file path for cleanup
            self.metadata_file = output_file

            logger.info(f"Function metadata written to: {output_file}")

        except Exception as e:
            logger.error(f"Failed to write function metadata: {e}")

    def start(self):
        """
        Start the RPC server (blocking).
        Listens for incoming RPC requests on the ZeroMQ ROUTER socket.
        """
        self.running = True

        # Set up signal handler for clean Ctrl+C shutdown
        def signal_handler(sig, frame):
            logger.info("\nReceived interrupt signal, shutting down...")
            self.running = False

        signal.signal(signal.SIGINT, signal_handler)
        signal.signal(signal.SIGTERM, signal_handler)

        logger.debug(f"xllify RPC: {self.endpoint}")
        logger.info(f"Loaded functions: {list(self.functions.keys())}")

        # Write function metadata JSON to AppData
        self._write_function_metadata()

        try:
            # Create ZeroMQ context and DEALER socket
            self._context = zmq.Context()
            self._router = self._context.socket(zmq.DEALER)
            self._router.setsockopt(zmq.LINGER, 0)  # Don't wait on close

            # Set buffer sizes for large messages (32MB buffers - supports ~1M cells)
            buffer_size = 32 * 1024 * 1024  # 32MB
            self._router.setsockopt(zmq.SNDBUF, buffer_size)
            self._router.setsockopt(zmq.RCVBUF, buffer_size)

            # Set high water marks (queue depth before blocking)
            hwm = 1000  # Higher for bursts of RPC requests
            self._router.setsockopt(zmq.SNDHWM, hwm)
            self._router.setsockopt(zmq.RCVHWM, hwm)

            # Connect to backend endpoint (C++ broker's backend)
            self._router.connect(self.endpoint)
            logger.info(f"Connected to {self.endpoint}")

            # Poll with timeout so we can respond to Ctrl+C
            poller = zmq.Poller()
            poller.register(self._router, zmq.POLLIN)

            while self.running:
                try:
                    # Poll with 500ms timeout to check self.running
                    socks = dict(poller.poll(500))

                    if self._router in socks and socks[self._router] == zmq.POLLIN:
                        # Receive multipart message: [identity, "x:function:topic:count", arg1, arg2, ...]
                        try:
                            frames = self._router.recv_multipart()

                            # Handle the request (executes function, sends result via RTD)
                            self._handle_zmq_request(frames)

                        except zmq.Again:
                            # Timeout, continue
                            continue

                except Exception as e:
                    if self.running:
                        logger.error(f"RPC error: {e}")
                        logger.debug(traceback.format_exc())

        finally:
            # Cleanup
            if self._router:
                self._router.close()
            if self._context:
                self._context.term()

            # Cleanup metadata file on exit
            self._cleanup_metadata_file()

    def stop(self):
        """Stop the RPC server"""
        self.running = False
        self._cleanup_metadata_file()

    def _cleanup_metadata_file(self):
        """Remove the metadata JSON file if it exists"""
        if self.metadata_file and self.metadata_file.exists():
            try:
                # Best to leave in place for now?
                # self.metadata_file.unlink()
                logger.info(f"Cleaned up metadata file: {self.metadata_file}")
            except Exception as e:
                logger.error(f"Failed to cleanup metadata file: {e}")