mcp-stata 1.7.6__py3-none-any.whl → 1.16.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcp-stata might be problematic.

mcp_stata/streaming_io.py CHANGED
@@ -2,11 +2,13 @@ import queue
 import threading
 import time
 from typing import Any, Awaitable, Callable, Optional
+import logging
 
 import anyio
 
 
 _SENTINEL = object()
+logger = logging.getLogger("mcp_stata")
 
 
 class StreamBuffer:
@@ -92,7 +94,7 @@ class StreamingTeeIO:
                 self._on_chunk_callback(text)
             except Exception:
                 # Don't let callback errors break streaming
-                pass
+                logger.debug("Streaming chunk callback failed", exc_info=True)
 
         with self._lock:
             if self._closed:
mcp_stata/test_stata.py ADDED
@@ -0,0 +1,54 @@
+import stata_setup
+stata_setup.config("/Applications/StataNow/", "mp")
+from pystata import stata
+import tempfile
+import os
+
+print("=== Testing multiple concurrent logs ===\n")
+
+# Create temp files for logs
+log1_path = tempfile.mktemp(suffix='.smcl')
+log2_path = tempfile.mktemp(suffix='.smcl')
+
+stata.run("sysuse auto, clear")
+
+try:
+    # Start first (unnamed) log - simulating user's log
+    print("1. Starting unnamed user log...")
+    stata.run(f'log using "{log1_path}", replace smcl')
+
+    # Start second (named) log - our capture log
+    print("2. Starting named capture log...")
+    stata.run(f'log using "{log2_path}", replace smcl name(_capture)')
+
+    # Run a command - should go to both logs
+    print("3. Running command...")
+    stata.run("summarize price mpg")
+
+    # Close named log first
+    print("4. Closing named log...")
+    stata.run("log close _capture")
+
+    # Close unnamed log
+    print("5. Closing unnamed log...")
+    stata.run("log close")
+
+    print("\n=== SUCCESS: Multiple concurrent logs work! ===\n")
+
+    # Show contents
+    print("--- User log contents (first 500 chars) ---")
+    with open(log1_path, 'r') as f:
+        print(f.read()[:500])
+
+    print("\n--- Capture log contents (first 500 chars) ---")
+    with open(log2_path, 'r') as f:
+        print(f.read()[:500])
+
+except Exception as e:
+    print(f"\n=== FAILED: {e} ===\n")
+
+finally:
+    # Cleanup
+    for p in [log1_path, log2_path]:
+        if os.path.exists(p):
+            os.unlink(p)
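
The script above creates its log paths with `tempfile.mktemp`, which the standard library marks as deprecated because another process can claim the name before the file exists. A safer way to produce the same two `.smcl` paths, shown only as a sketch and not part of the release, would be:

```python
import tempfile

# delete=False keeps the files on disk so Stata's `log using ..., replace`
# can reopen them by path after the context managers close the handles.
with tempfile.NamedTemporaryFile(suffix=".smcl", delete=False) as tmp1, \
     tempfile.NamedTemporaryFile(suffix=".smcl", delete=False) as tmp2:
    log1_path, log2_path = tmp1.name, tmp2.name
```
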
mcp_stata/ui_http.py CHANGED
@@ -4,11 +4,26 @@ import secrets
 import threading
 import time
 import uuid
+import logging
 from dataclasses import dataclass
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import Any, Callable, Optional
 
 from .stata_client import StataClient
+from .config import (
+    DEFAULT_HOST,
+    DEFAULT_PORT,
+    MAX_ARROW_LIMIT,
+    MAX_CHARS,
+    MAX_LIMIT,
+    MAX_REQUEST_BYTES,
+    MAX_VARS,
+    TOKEN_TTL_S,
+    VIEW_TTL_S,
+)
+
+
+logger = logging.getLogger("mcp_stata")
 
 
 def _stable_hash(payload: dict[str, Any]) -> str:
@@ -39,14 +54,15 @@ class UIChannelManager:
         self,
         client: StataClient,
         *,
-        host: str = "127.0.0.1",
-        port: int = 0,
-        token_ttl_s: int = 20 * 60,
-        view_ttl_s: int = 30 * 60,
-        max_limit: int = 500,
-        max_vars: int = 200,
-        max_chars: int = 500,
-        max_request_bytes: int = 1_000_000,
+        host: str = DEFAULT_HOST,
+        port: int = DEFAULT_PORT,
+        token_ttl_s: int = TOKEN_TTL_S,
+        view_ttl_s: int = VIEW_TTL_S,
+        max_limit: int = MAX_LIMIT,
+        max_vars: int = MAX_VARS,
+        max_chars: int = MAX_CHARS,
+        max_request_bytes: int = MAX_REQUEST_BYTES,
+        max_arrow_limit: int = MAX_ARROW_LIMIT,
     ):
         self._client = client
         self._host = host
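
The literal defaults removed here move into the new `mcp_stata/config.py` (listed in the RECORD further down). Judging from the old keyword defaults and the limits documented in the README portion of this diff, that module presumably defines constants roughly like the sketch below; the actual values ship with the wheel and are not reproduced in this diff:

```python
# Hypothetical reconstruction of mcp_stata/config.py, inferred from the old
# keyword defaults and the README's documented limits; values may differ.
DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 0                 # bind to an ephemeral localhost port
TOKEN_TTL_S = 20 * 60            # bearer-token lifetime (seconds)
VIEW_TTL_S = 30 * 60             # filtered-view handle lifetime (seconds)
MAX_LIMIT = 500                  # rows per /v1/page request
MAX_VARS = 32_767                # per the README's updated maxVars
MAX_CHARS = 500
MAX_REQUEST_BYTES = 1_000_000
MAX_ARROW_LIMIT = 1_000_000      # rows per /v1/arrow request
```
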
@@ -57,6 +73,7 @@ class UIChannelManager:
         self._max_vars = max_vars
         self._max_chars = max_chars
         self._max_request_bytes = max_request_bytes
+        self._max_arrow_limit = max_arrow_limit
 
         self._lock = threading.Lock()
         self._httpd: ThreadingHTTPServer | None = None
@@ -87,7 +104,7 @@ class UIChannelManager:
         return UIChannelInfo(base_url=base_url, token=self._token or "", expires_at=self._expires_at)
 
     def capabilities(self) -> dict[str, bool]:
-        return {"dataBrowser": True, "filtering": True, "sorting": True}
+        return {"dataBrowser": True, "filtering": True, "sorting": True, "arrowStream": True}
 
     def current_dataset_id(self) -> str:
         with self._lock:
@@ -193,6 +210,7 @@ class UIChannelManager:
         manager = self
 
         class Handler(BaseHTTPRequestHandler):
+
            def _send_json(self, status: int, payload: dict[str, Any]) -> None:
                data = json.dumps(payload).encode("utf-8")
                self.send_response(status)
@@ -201,7 +219,17 @@ class UIChannelManager:
                self.end_headers()
                self.wfile.write(data)
 
+           def _send_binary(self, status: int, data: bytes, content_type: str) -> None:
+               self.send_response(status)
+               self.send_header("Content-Type", content_type)
+               self.send_header("Content-Length", str(len(data)))
+               self.end_headers()
+               self.wfile.write(data)
+
            def _error(self, status: int, code: str, message: str, *, stata_rc: int | None = None) -> None:
+               if status >= 500 or code == "internal_error":
+                   logger.error("UI HTTP error %s: %s", code, message)
+                   message = "Internal server error"
                body: dict[str, Any] = {"error": {"code": code, "message": message}}
                if stata_rc is not None:
                    body["error"]["stataRc"] = stata_rc
@@ -288,20 +316,31 @@ class UIChannelManager:
                if not self._require_auth():
                    return
 
+
+               if self.path == "/v1/arrow":
+                   body = self._read_json()
+                   if body is None:
+                       return
+                   try:
+                       resp_bytes = handle_arrow_request(manager, body, view_id=None)
+                       self._send_binary(200, resp_bytes, "application/vnd.apache.arrow.stream")
+                       return
+                   except HTTPError as e:
+                       self._error(e.status, e.code, e.message, stata_rc=e.stata_rc)
+                       return
+                   except Exception as e:
+                       self._error(500, "internal_error", str(e))
+                       return
+
                if self.path == "/v1/page":
                    body = self._read_json()
                    if body is None:
                        return
-                   # Debug logging to diagnose limit parameter issues
-                   import sys
-                   print(f"[DEBUG] /v1/page request body: {body}", file=sys.stderr, flush=True)
-                   print(f"[DEBUG] limit value: {body.get('limit')!r} (type: {type(body.get('limit')).__name__})", file=sys.stderr, flush=True)
                    try:
                        resp = handle_page_request(manager, body, view_id=None)
                        self._send_json(200, resp)
                        return
                    except HTTPError as e:
-                       print(f"[DEBUG] HTTPError: {e.code} - {e.message}", file=sys.stderr, flush=True)
                        self._error(e.status, e.code, e.message, stata_rc=e.stata_rc)
                        return
                    except Exception as e:
@@ -354,16 +393,31 @@ class UIChannelManager:
                    body = self._read_json()
                    if body is None:
                        return
-                   # Debug logging to diagnose limit parameter issues
-                   import sys
-                   print(f"[DEBUG] /v1/views/{view_id}/page request body: {body}", file=sys.stderr, flush=True)
-                   print(f"[DEBUG] limit value: {body.get('limit')!r} (type: {type(body.get('limit')).__name__})", file=sys.stderr, flush=True)
                    try:
                        resp = handle_page_request(manager, body, view_id=view_id)
                        self._send_json(200, resp)
                        return
                    except HTTPError as e:
-                       print(f"[DEBUG] HTTPError: {e.code} - {e.message}", file=sys.stderr, flush=True)
+                       self._error(e.status, e.code, e.message, stata_rc=e.stata_rc)
+                       return
+                   except Exception as e:
+                       self._error(500, "internal_error", str(e))
+                       return
+
+               if self.path.startswith("/v1/views/") and self.path.endswith("/arrow"):
+                   parts = self.path.split("/")
+                   if len(parts) != 5:
+                       self._error(404, "not_found", "Not found")
+                       return
+                   view_id = parts[3]
+                   body = self._read_json()
+                   if body is None:
+                       return
+                   try:
+                       resp_bytes = handle_arrow_request(manager, body, view_id=view_id)
+                       self._send_binary(200, resp_bytes, "application/vnd.apache.arrow.stream")
+                       return
+                   except HTTPError as e:
                        self._error(e.status, e.code, e.message, stata_rc=e.stata_rc)
                        return
                    except Exception as e:
@@ -593,3 +647,108 @@ def handle_page_request(manager: UIChannelManager, body: dict[str, Any], *, view
             "missing": ".",
         },
     }
+
+
+def handle_arrow_request(manager: UIChannelManager, body: dict[str, Any], *, view_id: str | None) -> bytes:
+    max_limit, max_vars, max_chars, _ = manager.limits()
+    # Use the specific Arrow limit instead of the general UI page limit
+    chunk_limit = getattr(manager, "_max_arrow_limit", 1_000_000)
+
+    if view_id is None:
+        dataset_id = str(body.get("datasetId", ""))
+        frame = str(body.get("frame", "default"))
+    else:
+        view = manager.get_view(view_id)
+        if view is None:
+            raise HTTPError(404, "not_found", "View not found")
+        dataset_id = view.dataset_id
+        frame = view.frame
+
+    # Parse offset (default 0)
+    try:
+        offset = int(body.get("offset") or 0)
+    except (ValueError, TypeError):
+        raise HTTPError(400, "invalid_request", "offset must be a valid integer")
+
+    # Parse limit (required)
+    limit_raw = body.get("limit")
+    if limit_raw is None:
+        # Default to the max arrow limit if not specified?
+        # The previous code required it. Let's keep it required but allow large values.
+        raise HTTPError(400, "invalid_request", "limit is required")
+    try:
+        limit = int(limit_raw)
+    except (ValueError, TypeError):
+        raise HTTPError(400, "invalid_request", "limit must be a valid integer")
+
+    vars_req = body.get("vars", [])
+    include_obs_no = bool(body.get("includeObsNo", False))
+    sort_by = body.get("sortBy", [])
+
+    if offset < 0:
+        raise HTTPError(400, "invalid_request", "offset must be >= 0")
+    if limit <= 0:
+        raise HTTPError(400, "invalid_request", "limit must be > 0")
+    # Arrow streams are efficient, but we still respect a (much larger) max limit
+    if limit > chunk_limit:
+        raise HTTPError(400, "request_too_large", f"limit must be <= {chunk_limit}")
+
+    if not isinstance(vars_req, list) or not all(isinstance(v, str) for v in vars_req):
+        raise HTTPError(400, "invalid_request", "vars must be a list of strings")
+    if len(vars_req) > max_vars:
+        raise HTTPError(400, "request_too_large", f"vars length must be <= {max_vars}")
+
+    current_id = manager.current_dataset_id()
+    if dataset_id != current_id:
+        raise HTTPError(409, "dataset_changed", "Dataset changed")
+
+    if view_id is None:
+        obs_indices = None
+    else:
+        assert view is not None
+        obs_indices = view.obs_indices
+
+    try:
+        # Apply sorting if requested
+        if sort_by:
+            if not isinstance(sort_by, list) or not all(isinstance(s, str) for s in sort_by):
+                raise HTTPError(400, "invalid_request", "sortBy must be a list of strings")
+            try:
+                manager._client.apply_sort(sort_by)
+                if view_id is not None:
+                    # encapsulated re-computation if view is active
+                    # Note: original code only does this for view_id is not None
+                    # But if we sort global dataset, existing views might become invalid unless
+                    # they rely on stable indices. Stata indices change on sort.
+                    # The current implementation of create_view computes indices once.
+                    # If we sort, those indices point to different rows!
+                    # The original code handles this by re-computing view indices on sort.
+                    assert view is not None
+                    obs_indices = manager._client.compute_view_indices(view.filter_expr)
+            except ValueError as e:
+                raise HTTPError(400, "invalid_request", f"Invalid sort: {e}")
+            except RuntimeError as e:
+                raise HTTPError(500, "internal_error", f"Sort failed: {e}")
+
+        arrow_bytes = manager._client.get_arrow_stream(
+            offset=offset,
+            limit=limit,
+            vars=vars_req,
+            include_obs_no=include_obs_no,
+            obs_indices=obs_indices,
+        )
+        return arrow_bytes
+
+    except RuntimeError as e:
+        msg = str(e) or "No data in memory"
+        if "no data" in msg.lower():
+            raise HTTPError(400, "no_data_in_memory", msg)
+        raise HTTPError(500, "internal_error", msg)
+    except ValueError as e:
+        msg = str(e)
+        if "invalid variable" in msg.lower():
+            raise HTTPError(400, "invalid_variable", msg)
+        raise HTTPError(400, "invalid_request", msg)
+    except Exception as e:
+        raise HTTPError(500, "internal_error", str(e))
+
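
For a client, the new endpoint works like this: POST a JSON body shaped like a `/v1/page` request, receive `application/vnd.apache.arrow.stream` bytes, and decode them with pyarrow. A minimal sketch, assuming `base_url`, `token`, and `dataset_id` were already obtained from `get_ui_channel()` and that the channel accepts a standard `Authorization: Bearer` header:

```python
import httpx
import pyarrow.ipc as ipc

def fetch_arrow_page(base_url: str, token: str, dataset_id: str):
    # Same request shape as /v1/page: datasetId plus vars/offset/limit.
    resp = httpx.post(
        f"{base_url}/v1/arrow",
        headers={"Authorization": f"Bearer {token}"},
        json={"datasetId": dataset_id, "vars": ["price", "mpg"], "offset": 0, "limit": 10_000},
    )
    resp.raise_for_status()
    # The body is an Arrow IPC stream; read it into an in-memory Table.
    return ipc.open_stream(resp.content).read_all()
```
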
mcp_stata-1.7.6.dist-info/METADATA → mcp_stata-1.16.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-stata
-Version: 1.7.6
+Version: 1.16.6
 Summary: A lightweight Model Context Protocol (MCP) server for Stata. Execute commands, inspect data, retrieve stored results (`r()`/`e()`), and view graphs in your chat interface. Built for economists who want to integrate LLM assistance into their Stata workflow.
 Project-URL: Homepage, https://github.com/tmonk/mcp-stata
 Project-URL: Repository, https://github.com/tmonk/mcp-stata
@@ -21,6 +21,8 @@ Requires-Python: >=3.12
 Requires-Dist: httpx<0.28.0,>=0.27.0
 Requires-Dist: mcp[cli]>=1.0.0
 Requires-Dist: pandas>=2.0.0
+Requires-Dist: polars>=1.36.1
+Requires-Dist: pyarrow>=14.0.0
 Requires-Dist: pydantic>=2.0.0
 Requires-Dist: pystata>=0.0.1
 Requires-Dist: pytest-asyncio>=1.3.0
@@ -30,13 +32,14 @@ Requires-Dist: build>=1.3.0; extra == 'dev'
 Requires-Dist: hatch>=1.16.2; extra == 'dev'
 Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
 Requires-Dist: pytest>=7.0.0; extra == 'dev'
+Requires-Dist: python-semantic-release>=9.8.0; extra == 'dev'
 Requires-Dist: ruff>=0.4.0; extra == 'dev'
 Requires-Dist: twine>=6.2.0; extra == 'dev'
 Description-Content-Type: text/markdown
 
 # Stata MCP Server
 
-<a href="https://cursor.com/en-US/install-mcp?name=mcp-stata&config=eyJjb21tYW5kIjoidXZ4IC0tZnJvbSBtY3Atc3RhdGEgbWNwLXN0YXRhIn0%3D"><img src="https://cursor.com/deeplink/mcp-install-dark.svg" alt="Install MCP Server" height="20"></a>&nbsp;
+<a href="https://cursor.com/en-US/install-mcp?name=mcp-stata&config=eyJjb21tYW5kIjoidXZ4IC0tcmVmcmVzaCAtLWZyb20gbWNwLXN0YXRhQGxhdGVzdCBtY3Atc3RhdGEgLS1yZWluc3RhbGwtcGFja2FnZSBtY3Atc3RhdGEifQ%3D%3D"><img src="https://cursor.com/deeplink/mcp-install-dark.svg" alt="Install MCP Server" height="20"></a>&nbsp;
 <a href="https://pypi.org/project/mcp-stata/"><img src="https://img.shields.io/pypi/v/mcp-stata?style=flat&color=black" alt="PyPI - Version" height="20"></a>
 
 A [Model Context Protocol](https://github.com/modelcontextprotocol) (MCP) server that connects AI agents to a local Stata installation.
@@ -64,7 +67,7 @@ This server enables LLMs to:
 ### Run as a published tool with `uvx`
 
 ```bash
-uvx --refresh --from mcp-stata@latest mcp-stata
+uvx --refresh --from mcp-stata@latest mcp-stata --reinstall-package mcp-stata
 ```
 
 `uvx` is an alias for `uv tool run` and runs the tool in an isolated, cached environment.
@@ -118,6 +121,8 @@ Config file locations include:
         "--refresh",
         "--from",
         "mcp-stata@latest",
+        "mcp-stata",
+        "--reinstall-package",
         "mcp-stata"
       ]
     }
@@ -147,6 +152,8 @@ Cursor supports MCP config at:
         "--refresh",
         "--from",
         "mcp-stata@latest",
+        "mcp-stata",
+        "--reinstall-package",
         "mcp-stata"
       ]
     }
@@ -172,6 +179,8 @@ A common location is `~/.codeium/windsurf/mcp_config.json`.
         "--refresh",
         "--from",
         "mcp-stata@latest",
+        "mcp-stata",
+        "--reinstall-package",
         "mcp-stata"
       ]
     }
@@ -196,6 +205,8 @@ In Antigravity, MCP servers are managed from the MCP store/menu; you can open **
         "--refresh",
         "--from",
         "mcp-stata@latest",
+        "mcp-stata",
+        "--reinstall-package",
         "mcp-stata"
       ]
     }
@@ -223,6 +234,8 @@ Create `.vscode/mcp.json`:
         "--refresh",
         "--from",
         "mcp-stata@latest",
+        "mcp-stata",
+        "--reinstall-package",
         "mcp-stata"
       ]
     }
@@ -244,13 +257,16 @@ VS Code documents `.vscode/mcp.json` and the `servers` schema, including `type`
   - Always writes output to a temporary log file and emits a single `notifications/logMessage` containing `{"event":"log_path","path":"..."}` so the client can tail it locally.
   - May emit `notifications/progress` when the client provides a progress token/callback.
 * `read_log(path, offset=0, max_bytes=65536)`: Read a slice of a previously-provided log file (JSON: `path`, `offset`, `next_offset`, `data`).
+* `find_in_log(path, query, start_offset=0, max_bytes=5_000_000, before=2, after=2, case_sensitive=False, regex=False, max_matches=50)`: Search a log file for text and return context windows.
+  - Returns JSON with `matches` (context lines, line indices), `next_offset`, and `truncated` if `max_matches` is hit.
+  - Supports literal or regex search with bounded read window for large logs.
 * `load_data(source, clear=True, as_json=True, raw=False, max_output_lines=None)`: Heuristic loader (sysuse/webuse/use/path/URL) with JSON envelope unless `raw=True`. Supports output truncation.
 * `get_data(start=0, count=50)`: View dataset rows (JSON response, capped to 500 rows).
 * `get_ui_channel()`: Return a short-lived localhost HTTP endpoint + bearer token for the UI-only data browser.
 * `describe()`: View dataset structure via Stata `describe`.
 * `list_graphs()`: See available graphs in memory (JSON list with an `active` flag).
 * `export_graph(graph_name=None, format="pdf")`: Export a graph to a file path (default PDF; use `format="png"` for PNG).
-* `export_graphs_all()`: Export all in-memory graphs. Returns file paths by default.
+* `export_graphs_all()`: Export all in-memory graphs. Returns file paths.
 * `get_help(topic, plain_text=False)`: Markdown-rendered Stata help by default; `plain_text=True` strips formatting.
 * `codebook(variable, as_json=True, trace=False, raw=False, max_output_lines=None)`: Variable-level metadata (JSON envelope by default; supports `trace=True` and output truncation).
 * `run_do_file(path, echo=True, as_json=True, trace=False, raw=False, max_output_lines=None)`: Execute a .do file.
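
Like the other tools in the list above, the new `find_in_log` is invoked through an ordinary MCP tool call. A rough sketch using the MCP Python SDK's stdio client; the SDK import paths, log path, and query string are assumptions for illustration:

```python
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def main() -> None:
    # Launch the server roughly the way the editor configs above do.
    params = StdioServerParameters(command="uvx", args=["--from", "mcp-stata@latest", "mcp-stata"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Search a previously returned log for a Stata error code.
            result = await session.call_tool(
                "find_in_log",
                {"path": "/tmp/stata_run.smcl", "query": "r(198)", "regex": False},
            )
            print(result)

asyncio.run(main())
```
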
@@ -301,7 +317,8 @@ Call the MCP tool `get_ui_channel()` and parse the JSON:
   "capabilities": {
     "dataBrowser": true,
     "filtering": true,
-    "sorting": true
+    "sorting": true,
+    "arrowStream": true
   }
 }
 ```
@@ -309,9 +326,10 @@ Call the MCP tool `get_ui_channel()` and parse the JSON:
 Server-enforced limits (current defaults):
 
 - **maxLimit**: 500
-- **maxVars**: 200
+- **maxVars**: 32,767
 - **maxChars**: 500
 - **maxRequestBytes**: 1,000,000
+- **maxArrowLimit**: 1,000,000 (specific to `/v1/arrow`)
 
 ### Endpoints
 
@@ -323,10 +341,14 @@ All endpoints are under `baseUrl` and require the bearer token.
   - Returns variable metadata (`name`, `type`, `label`, `format`).
 - `POST /v1/page`
   - Returns a page of data for selected variables.
+- `POST /v1/arrow`
+  - Returns a binary Arrow IPC stream (same input as `/v1/page`).
 - `POST /v1/views`
   - Creates a server-side filtered view (handle-based filtering).
 - `POST /v1/views/:viewId/page`
   - Pages within a filtered view.
+- `POST /v1/views/:viewId/arrow`
+  - Returns a binary Arrow IPC stream from a filtered view.
 - `DELETE /v1/views/:viewId`
   - Deletes a view handle.
 - `POST /v1/filters/validate`
mcp_stata-1.16.6.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+mcp_stata/__init__.py,sha256=kJKKRn7lGuVCuS2-GaN5VoVcvnxtNlfuswW_VOlYqwg,98
+mcp_stata/config.py,sha256=SfXltpwO_gROABca1sm0xXDhaeRmlRfQmXcnBiG4GYk,714
+mcp_stata/discovery.py,sha256=16DQhO2O3klKq-HwBaK3pv5LpKGPyIVlo1VyW1p4Msg,20799
+mcp_stata/graph_detector.py,sha256=6dc2M9Lr_0PVA0yIPdc4clmbTQsiOADnqtb0fOxYEQs,17370
+mcp_stata/models.py,sha256=G30Km396RDgvHjTeg7uR5BKT8AK5mX3Zw8sUuBSbdpM,1272
+mcp_stata/server.py,sha256=cEgoI2kT8-H2GAoqW440y8KuiqGVwSUzz6WxJO5WXfc,40396
+mcp_stata/stata_client.py,sha256=wqcrycdXj5f-azExGHpTlMoZir6e5vexwaYZtVhE6Xs,147729
+mcp_stata/streaming_io.py,sha256=1WLStbdYEQFO1P3gCchmD7frHpmn5mHi6DuynodfiqM,7087
+mcp_stata/test_stata.py,sha256=HF_P4IjvLGlihXggbt79MZZB6PbuYHpKI3S0R2dgCeE,1512
+mcp_stata/ui_http.py,sha256=vG6oO3fokSvWkIx3fcCb-tiqlkuFKtml2ptvQrZ-o5k,30457
+mcp_stata/smcl/smcl2html.py,sha256=wi91mOMeV9MCmHtNr0toihNbaiDCNZ_NP6a6xEAzWLM,2624
+mcp_stata-1.16.6.dist-info/METADATA,sha256=mZnfEEMS1v9KlHrVIXwH4qph7EuGJWo2O5146E_evKk,17072
+mcp_stata-1.16.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+mcp_stata-1.16.6.dist-info/entry_points.txt,sha256=TcOgrtiTL4LGFEDb1pCrQWA-fUZvIujDOvQ-bWFh5Z8,52
+mcp_stata-1.16.6.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+mcp_stata-1.16.6.dist-info/RECORD,,
mcp_stata-1.7.6.dist-info/RECORD REMOVED
@@ -1,14 +0,0 @@
-mcp_stata/__init__.py,sha256=kJKKRn7lGuVCuS2-GaN5VoVcvnxtNlfuswW_VOlYqwg,98
-mcp_stata/discovery.py,sha256=jQN9uvBNHF_hCCU9k6BDtSdDxiUVpvXcOJwpWYwo55c,17430
-mcp_stata/graph_detector.py,sha256=-dJIU1Dq_c1eQSk4eegUi0gU2N-tFqjFGM0tE1E32KM,16066
-mcp_stata/models.py,sha256=EKFawioKBhtZhRQ3pFzrKV99ui9L-qzcAuRYuk0npVg,1235
-mcp_stata/server.py,sha256=PV8ragGMeHT72zgVx5DJp3vt8CPqT8iwdvJ8GXSctds,15989
-mcp_stata/stata_client.py,sha256=Yd8SxtLf_JVxeOnqOwEM6lNGdhw4T3v2tjVxpTCf9xU,96397
-mcp_stata/streaming_io.py,sha256=GVaXgTtxx8YLY6RWqdTcO2M3QSqxLsefqkmnlNO1nTI,6974
-mcp_stata/ui_http.py,sha256=w1tYxNuwuhkjyfWHxUnpd1DcVBaakjPkEnWr-Fo1lWo,24193
-mcp_stata/smcl/smcl2html.py,sha256=wi91mOMeV9MCmHtNr0toihNbaiDCNZ_NP6a6xEAzWLM,2624
-mcp_stata-1.7.6.dist-info/METADATA,sha256=J9ki7OKvuj3Ve_kmr6sWGkZq3eQg24iDkpwqr5J-iLI,15951
-mcp_stata-1.7.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-mcp_stata-1.7.6.dist-info/entry_points.txt,sha256=TcOgrtiTL4LGFEDb1pCrQWA-fUZvIujDOvQ-bWFh5Z8,52
-mcp_stata-1.7.6.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-mcp_stata-1.7.6.dist-info/RECORD,,