mcp-stata 1.2.2__py3-none-any.whl → 1.6.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of mcp-stata might be problematic. Click here for more details.

@@ -0,0 +1,261 @@
1
+ import queue
2
+ import threading
3
+ import time
4
+ from typing import Any, Awaitable, Callable, Optional
5
+
6
+ import anyio
7
+
8
+
9
+ _SENTINEL = object()
10
+
11
+
12
class StreamBuffer:
    """Thread-safe accumulator for streamed text with a hard size cap.

    Writes are appended until ``max_total_chars`` characters have been
    stored; at that point ``truncation_marker`` is appended exactly once and
    every later write is silently discarded. ``write`` always reports the
    caller's full character count so writers behave as if the write
    succeeded even after truncation.
    """

    def __init__(
        self,
        *,
        max_total_chars: int = 2_000_000,
        truncation_marker: str = "\n... (output truncated)\n",
    ):
        self._lock = threading.Lock()
        self._parts: list[str] = []
        self._total_chars = 0
        self._max_total_chars = max_total_chars
        self._truncation_marker = truncation_marker
        self._truncated = False

    def write(self, data: Any) -> int:
        """Append ``data`` (normalized to str) and return its full length."""
        text = self._normalize(data)
        if not text:
            return 0

        reported = len(text)
        with self._lock:
            if self._truncated:
                # Already capped: pretend the write succeeded.
                return reported

            budget = self._max_total_chars - self._total_chars
            if budget <= 0:
                # Cap reached exactly on a previous write; emit the marker once.
                self._append_marker_locked()
            elif reported <= budget:
                self._parts.append(text)
                self._total_chars += reported
            else:
                # Keep only what fits, then seal the buffer with the marker.
                self._parts.append(text[:budget])
                self._total_chars += budget
                self._append_marker_locked()
        return reported

    def _append_marker_locked(self) -> None:
        # Caller must hold self._lock. Appends the truncation marker and
        # flips the truncated flag so no further content is stored.
        self._parts.append(self._truncation_marker)
        self._total_chars += len(self._truncation_marker)
        self._truncated = True

    def get_value(self) -> str:
        """Return everything captured so far as a single string."""
        with self._lock:
            return "".join(self._parts)

    @staticmethod
    def _normalize(data: Any) -> str:
        """Coerce arbitrary write payloads to str (None -> "", bytes -> UTF-8)."""
        if data is None:
            return ""
        if isinstance(data, bytes):
            return data.decode("utf-8", errors="replace")
        return str(data)
64
+
65
+
66
class StreamingTeeIO:
    """File-like writer that mirrors text into a StreamBuffer and a queue.

    Every write is captured in full by ``buffer`` (subject to that buffer's
    own cap), optionally reported to ``on_chunk_callback``, and pushed onto
    ``q`` in fragments of at most ``max_fragment_chars`` characters for an
    async consumer to drain. After ``close()`` a sentinel is enqueued and no
    further fragments are emitted.
    """

    def __init__(
        self,
        buffer: StreamBuffer,
        q: queue.Queue,
        *,
        max_fragment_chars: int = 4000,
        on_chunk_callback=None,
    ):
        self._buffer = buffer
        self._queue = q
        self._max_fragment_chars = max_fragment_chars
        self._closed = False
        self._lock = threading.Lock()
        self._on_chunk_callback = on_chunk_callback

    def write(self, data: Any) -> int:
        """Tee ``data`` to the buffer, the callback, and the queue."""
        text = StreamBuffer._normalize(data)
        if not text:
            return 0

        written = self._buffer.write(text)

        # Notify the observer (used for graph detection); its failures must
        # never interrupt output streaming.
        if self._on_chunk_callback:
            try:
                self._on_chunk_callback(text)
            except Exception:
                pass

        with self._lock:
            if not self._closed:
                step = self._max_fragment_chars
                # Slice into bounded fragments; a short text yields one slice.
                for start in range(0, len(text), step):
                    self._queue.put_nowait(text[start : start + step])
            return written

    def flush(self) -> None:
        """No-op; fragments are forwarded eagerly on write."""
        return

    def isatty(self) -> bool:
        return False

    def close(self) -> None:
        """Idempotently close the stream and enqueue the end sentinel."""
        with self._lock:
            if not self._closed:
                self._closed = True
                self._queue.put_nowait(_SENTINEL)
119
+
120
+
121
class TailBuffer:
    """Thread-safe buffer that retains only the last ``max_chars`` characters."""

    def __init__(self, *, max_chars: int = 8000):
        self._lock = threading.Lock()
        self._parts: list[str] = []
        self._total = 0
        self._max_chars = max_chars

    def append(self, data: Any) -> None:
        """Append ``data`` (normalized to str), evicting oldest text if over budget."""
        text = StreamBuffer._normalize(data)
        if not text:
            return

        with self._lock:
            self._parts.append(text)
            self._total += len(text)
            if self._total > self._max_chars:
                self._evict_locked()

    def _evict_locked(self) -> None:
        # Caller must hold self._lock. Drops characters from the oldest
        # fragments until the total is back within ``max_chars``.
        excess = self._total - self._max_chars
        while excess > 0 and self._parts:
            oldest = self._parts[0]
            if len(oldest) <= excess:
                # Whole fragment falls outside the window; discard it.
                del self._parts[0]
                self._total -= len(oldest)
                excess = self._total - self._max_chars
            else:
                # Keep only the tail of this fragment.
                self._parts[0] = oldest[excess:]
                self._total -= excess
                excess = 0

    def get_value(self) -> str:
        """Return the retained tail as a single string."""
        with self._lock:
            return "".join(self._parts)
157
+
158
+
159
class FileTeeIO:
    """File-like writer that tees text to an underlying file and a TailBuffer.

    A newline in the written text triggers a best-effort flush of the file
    so the on-disk copy stays reasonably current. ``close()`` is idempotent
    and swallows flush/close errors.
    """

    def __init__(self, file_obj, tail: TailBuffer):
        self._file = file_obj
        self._tail = tail
        self._lock = threading.Lock()
        self._closed = False

    def write(self, data: Any) -> int:
        """Write ``data`` to both sinks; report its length even when closed."""
        text = StreamBuffer._normalize(data)
        if not text:
            return 0

        with self._lock:
            if not self._closed:
                self._tail.append(text)
                self._file.write(text)
                # Flush at line boundaries, ignoring I/O errors so output
                # capture keeps going.
                if "\n" in text:
                    try:
                        self._file.flush()
                    except Exception:
                        pass
            return len(text)

    def flush(self) -> None:
        """Best-effort flush of the underlying file; no-op once closed."""
        with self._lock:
            if self._closed:
                return
            try:
                self._file.flush()
            except Exception:
                pass

    def isatty(self) -> bool:
        return False

    def close(self) -> None:
        """Idempotently flush and close the underlying file, swallowing errors."""
        with self._lock:
            if self._closed:
                return
            self._closed = True
            for finalize in (self._file.flush, self._file.close):
                try:
                    finalize()
                except Exception:
                    pass
210
+
211
+
212
async def drain_queue_and_notify(
    q: queue.Queue,
    notify_log: Callable[[str], Awaitable[None]],
    *,
    min_interval_ms: int = 200,
    max_chunk_chars: int = 4000,
    on_chunk: Optional[Callable[[str], Awaitable[None]]] = None,
) -> None:
    """Drain text fragments from ``q`` and forward them in batched chunks.

    Polls the queue without blocking, accumulating fragments until either
    ``max_chunk_chars`` characters are buffered or ``min_interval_ms`` has
    elapsed since the last send, then joins and emits the batch via
    ``notify_log`` (and ``on_chunk`` first, when provided). Stops when the
    module-level ``_SENTINEL`` object is received, after flushing any
    remaining buffered text. Non-string items are normalized via
    ``StreamBuffer._normalize``; empty items are skipped.

    NOTE(review): exceptions raised by ``notify_log``/``on_chunk`` propagate
    and abort the drain loop — presumably handled by the caller; confirm.
    """
    buf: list[str] = []
    buf_len = 0
    last_send = time.monotonic()

    async def flush() -> None:
        # Emit the current batch (if any) and reset the buffer/timer.
        nonlocal buf, buf_len, last_send
        if not buf:
            return
        chunk = "".join(buf)
        buf = []
        buf_len = 0
        if on_chunk is not None:
            await on_chunk(chunk)
        await notify_log(chunk)
        last_send = time.monotonic()

    while True:
        item = None
        try:
            item = q.get_nowait()
        except queue.Empty:
            # Queue idle: flush a stale batch, then sleep one interval so the
            # loop does not busy-spin while producers are quiet.
            now = time.monotonic()
            if buf and (now - last_send) * 1000 >= min_interval_ms:
                await flush()
            await anyio.sleep(min_interval_ms / 1000)
            continue

        if item is _SENTINEL:
            break

        text = StreamBuffer._normalize(item)
        if not text:
            continue

        buf.append(text)
        buf_len += len(text)

        # Send when the batch is big enough or the interval has elapsed.
        now = time.monotonic()
        if buf_len >= max_chunk_chars or (now - last_send) * 1000 >= min_interval_ms:
            await flush()

    # Sentinel received: deliver whatever is still buffered.
    await flush()