mcp-stata 1.18.0__cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between those versions.

Potentially problematic release.


This version of mcp-stata has been flagged as potentially problematic; see the registry's advisory page for details.

@@ -0,0 +1,263 @@
1
+ import queue
2
+ import threading
3
+ import time
4
+ from typing import Any, Awaitable, Callable, Optional
5
+ import logging
6
+
7
+ import anyio
8
+
9
+
10
+ _SENTINEL = object()
11
+ logger = logging.getLogger("mcp_stata")
12
+
13
+
14
+ class StreamBuffer:
15
+ def __init__(
16
+ self,
17
+ *,
18
+ max_total_chars: int = 2_000_000,
19
+ truncation_marker: str = "\n... (output truncated)\n",
20
+ ):
21
+ self._lock = threading.Lock()
22
+ self._parts: list[str] = []
23
+ self._total_chars = 0
24
+ self._max_total_chars = max_total_chars
25
+ self._truncation_marker = truncation_marker
26
+ self._truncated = False
27
+
28
+ def write(self, data: Any) -> int:
29
+ text = self._normalize(data)
30
+ if not text:
31
+ return 0
32
+
33
+ with self._lock:
34
+ if self._truncated:
35
+ return len(text)
36
+
37
+ remaining = self._max_total_chars - self._total_chars
38
+ if remaining <= 0:
39
+ self._parts.append(self._truncation_marker)
40
+ self._total_chars += len(self._truncation_marker)
41
+ self._truncated = True
42
+ return len(text)
43
+
44
+ if len(text) <= remaining:
45
+ self._parts.append(text)
46
+ self._total_chars += len(text)
47
+ return len(text)
48
+
49
+ self._parts.append(text[:remaining])
50
+ self._parts.append(self._truncation_marker)
51
+ self._total_chars += remaining + len(self._truncation_marker)
52
+ self._truncated = True
53
+ return len(text)
54
+
55
+ def get_value(self) -> str:
56
+ with self._lock:
57
+ return "".join(self._parts)
58
+
59
+ @staticmethod
60
+ def _normalize(data: Any) -> str:
61
+ if data is None:
62
+ return ""
63
+ if isinstance(data, bytes):
64
+ return data.decode("utf-8", errors="replace")
65
+ return str(data)
66
+
67
+
68
+ class StreamingTeeIO:
69
+ def __init__(
70
+ self,
71
+ buffer: StreamBuffer,
72
+ q: queue.Queue,
73
+ *,
74
+ max_fragment_chars: int = 4000,
75
+ on_chunk_callback=None,
76
+ ):
77
+ self._buffer = buffer
78
+ self._queue = q
79
+ self._max_fragment_chars = max_fragment_chars
80
+ self._closed = False
81
+ self._lock = threading.Lock()
82
+ self._on_chunk_callback = on_chunk_callback
83
+
84
+ def write(self, data: Any) -> int:
85
+ text = StreamBuffer._normalize(data)
86
+ if not text:
87
+ return 0
88
+
89
+ n = self._buffer.write(text)
90
+
91
+ # Call chunk callback for graph detection
92
+ if self._on_chunk_callback:
93
+ try:
94
+ self._on_chunk_callback(text)
95
+ except Exception:
96
+ # Don't let callback errors break streaming
97
+ logger.debug("Streaming chunk callback failed", exc_info=True)
98
+
99
+ with self._lock:
100
+ if self._closed:
101
+ return n
102
+ if len(text) <= self._max_fragment_chars:
103
+ self._queue.put_nowait(text)
104
+ else:
105
+ for i in range(0, len(text), self._max_fragment_chars):
106
+ self._queue.put_nowait(text[i : i + self._max_fragment_chars])
107
+ return n
108
+
109
+ def flush(self) -> None:
110
+ return
111
+
112
+ def isatty(self) -> bool:
113
+ return False
114
+
115
+ def close(self) -> None:
116
+ with self._lock:
117
+ if self._closed:
118
+ return
119
+ self._closed = True
120
+ self._queue.put_nowait(_SENTINEL)
121
+
122
+
123
+ class TailBuffer:
124
+ def __init__(self, *, max_chars: int = 8000):
125
+ self._lock = threading.Lock()
126
+ self._parts: list[str] = []
127
+ self._total = 0
128
+ self._max_chars = max_chars
129
+
130
+ def append(self, data: Any) -> None:
131
+ text = StreamBuffer._normalize(data)
132
+ if not text:
133
+ return
134
+
135
+ with self._lock:
136
+ self._parts.append(text)
137
+ self._total += len(text)
138
+
139
+ if self._total <= self._max_chars:
140
+ return
141
+
142
+ # Trim from the left until we are within budget.
143
+ over = self._total - self._max_chars
144
+ while over > 0 and self._parts:
145
+ head = self._parts[0]
146
+ if len(head) <= over:
147
+ self._parts.pop(0)
148
+ self._total -= len(head)
149
+ over = self._total - self._max_chars
150
+ continue
151
+
152
+ self._parts[0] = head[over:]
153
+ self._total -= over
154
+ over = 0
155
+
156
+ def get_value(self) -> str:
157
+ with self._lock:
158
+ return "".join(self._parts)
159
+
160
+
161
+ class FileTeeIO:
162
+ def __init__(self, file_obj, tail: TailBuffer):
163
+ self._file = file_obj
164
+ self._tail = tail
165
+ self._lock = threading.Lock()
166
+ self._closed = False
167
+
168
+ def write(self, data: Any) -> int:
169
+ text = StreamBuffer._normalize(data)
170
+ if not text:
171
+ return 0
172
+
173
+ with self._lock:
174
+ if self._closed:
175
+ return len(text)
176
+
177
+ self._tail.append(text)
178
+ self._file.write(text)
179
+
180
+ if "\n" in text:
181
+ try:
182
+ self._file.flush()
183
+ except Exception:
184
+ pass
185
+ return len(text)
186
+
187
+ def flush(self) -> None:
188
+ with self._lock:
189
+ if self._closed:
190
+ return
191
+ try:
192
+ self._file.flush()
193
+ except Exception:
194
+ return
195
+
196
+ def isatty(self) -> bool:
197
+ return False
198
+
199
+ def close(self) -> None:
200
+ with self._lock:
201
+ if self._closed:
202
+ return
203
+ self._closed = True
204
+ try:
205
+ self._file.flush()
206
+ except Exception:
207
+ pass
208
+ try:
209
+ self._file.close()
210
+ except Exception:
211
+ pass
212
+
213
+
214
+ async def drain_queue_and_notify(
215
+ q: queue.Queue,
216
+ notify_log: Callable[[str], Awaitable[None]],
217
+ *,
218
+ min_interval_ms: int = 200,
219
+ max_chunk_chars: int = 4000,
220
+ on_chunk: Optional[Callable[[str], Awaitable[None]]] = None,
221
+ ) -> None:
222
+ buf: list[str] = []
223
+ buf_len = 0
224
+ last_send = time.monotonic()
225
+
226
+ async def flush() -> None:
227
+ nonlocal buf, buf_len, last_send
228
+ if not buf:
229
+ return
230
+ chunk = "".join(buf)
231
+ buf = []
232
+ buf_len = 0
233
+ if on_chunk is not None:
234
+ await on_chunk(chunk)
235
+ await notify_log(chunk)
236
+ last_send = time.monotonic()
237
+
238
+ while True:
239
+ item = None
240
+ try:
241
+ item = q.get_nowait()
242
+ except queue.Empty:
243
+ now = time.monotonic()
244
+ if buf and (now - last_send) * 1000 >= min_interval_ms:
245
+ await flush()
246
+ await anyio.sleep(min_interval_ms / 1000)
247
+ continue
248
+
249
+ if item is _SENTINEL:
250
+ break
251
+
252
+ text = StreamBuffer._normalize(item)
253
+ if not text:
254
+ continue
255
+
256
+ buf.append(text)
257
+ buf_len += len(text)
258
+
259
+ now = time.monotonic()
260
+ if buf_len >= max_chunk_chars or (now - last_send) * 1000 >= min_interval_ms:
261
+ await flush()
262
+
263
+ await flush()
@@ -0,0 +1,54 @@
1
+ import stata_setup
2
+ stata_setup.config("/Applications/StataNow/", "mp")
3
+ from pystata import stata
4
+ import tempfile
5
+ import os
6
+
7
+ print("=== Testing multiple concurrent logs ===\n")
8
+
9
+ # Create temp files for logs
10
+ log1_path = tempfile.mktemp(suffix='.smcl')
11
+ log2_path = tempfile.mktemp(suffix='.smcl')
12
+
13
+ stata.run("sysuse auto, clear")
14
+
15
+ try:
16
+ # Start first (unnamed) log - simulating user's log
17
+ print("1. Starting unnamed user log...")
18
+ stata.run(f'log using "{log1_path}", replace smcl')
19
+
20
+ # Start second (named) log - our capture log
21
+ print("2. Starting named capture log...")
22
+ stata.run(f'log using "{log2_path}", replace smcl name(_capture)')
23
+
24
+ # Run a command - should go to both logs
25
+ print("3. Running command...")
26
+ stata.run("summarize price mpg")
27
+
28
+ # Close named log first
29
+ print("4. Closing named log...")
30
+ stata.run("log close _capture")
31
+
32
+ # Close unnamed log
33
+ print("5. Closing unnamed log...")
34
+ stata.run("log close")
35
+
36
+ print("\n=== SUCCESS: Multiple concurrent logs work! ===\n")
37
+
38
+ # Show contents
39
+ print("--- User log contents (first 500 chars) ---")
40
+ with open(log1_path, 'r') as f:
41
+ print(f.read()[:500])
42
+
43
+ print("\n--- Capture log contents (first 500 chars) ---")
44
+ with open(log2_path, 'r') as f:
45
+ print(f.read()[:500])
46
+
47
+ except Exception as e:
48
+ print(f"\n=== FAILED: {e} ===\n")
49
+
50
+ finally:
51
+ # Cleanup
52
+ for p in [log1_path, log2_path]:
53
+ if os.path.exists(p):
54
+ os.unlink(p)