mcp-stata 1.13.0__py3-none-any.whl → 1.16.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcp-stata might be problematic; see the package's registry page for details.

mcp_stata/discovery.py CHANGED
@@ -375,18 +375,19 @@ def find_stata_candidates() -> List[Tuple[str, str]]:
375
375
  candidates: List[Tuple[str, str]] = [] # List of (path, edition)
376
376
 
377
377
  if system == "Darwin": # macOS
378
- app_globs = [
378
+ # Search targets specific to macOS installation patterns
379
+ patterns = [
379
380
  "/Applications/StataNow/StataMP.app",
380
381
  "/Applications/StataNow/StataSE.app",
381
382
  "/Applications/StataNow/Stata.app",
382
383
  "/Applications/Stata/StataMP.app",
383
384
  "/Applications/Stata/StataSE.app",
384
385
  "/Applications/Stata/Stata.app",
385
- "/Applications/Stata*.app",
386
386
  "/Applications/Stata*/Stata*.app",
387
+ "/Applications/Stata*.app",
387
388
  ]
388
389
 
389
- for pattern in app_globs:
390
+ for pattern in patterns:
390
391
  for app_dir in glob.glob(pattern):
391
392
  binary_dir = os.path.join(app_dir, "Contents", "MacOS")
392
393
  if not _exists_fast(binary_dir):
@@ -395,6 +396,7 @@ def find_stata_candidates() -> List[Tuple[str, str]]:
395
396
  full_path = os.path.join(binary_dir, binary)
396
397
  if _exists_fast(full_path):
397
398
  candidates.append((full_path, edition))
399
+ candidates = _dedupe_preserve(candidates)
398
400
 
399
401
  elif system == "Windows":
400
402
  # Include ProgramW6432 (real 64-bit Program Files) and hardcode fallbacks.
@@ -443,6 +445,7 @@ def find_stata_candidates() -> List[Tuple[str, str]]:
443
445
  full_path = os.path.join(stata_dir, exe)
444
446
  if _exists_fast(full_path):
445
447
  candidates.append((full_path, edition))
448
+ candidates = _dedupe_preserve(candidates)
446
449
 
447
450
  elif system == "Linux":
448
451
  home_base = os.environ.get("HOME") or os.path.expanduser("~")
@@ -486,7 +489,8 @@ def find_stata_candidates() -> List[Tuple[str, str]]:
486
489
 
487
490
  # Final validation of candidates (still using fast checks)
488
491
  validated: List[Tuple[str, str]] = []
489
- for path, edition in _sort_candidates(candidates):
492
+ unique_candidates = _dedupe_preserve(candidates)
493
+ for path, edition in _sort_candidates(unique_candidates):
490
494
  if not _exists_fast(path):
491
495
  logger.warning("Discovered candidate missing on disk: %s", path)
492
496
  continue
@@ -6,6 +6,7 @@ during Stata command execution and automatically cache them.
6
6
  """
7
7
 
8
8
  import asyncio
9
+ import contextlib
9
10
  import inspect
10
11
  import re
11
12
  import threading
@@ -34,24 +35,27 @@ class GraphCreationDetector:
34
35
  def _describe_graph_signature(self, graph_name: str) -> str:
35
36
  """Return a stable signature for a graph.
36
37
 
37
- We intentionally avoid using timestamps as the signature, since that makes
38
- every poll look like a modification.
38
+ We avoid using Stata calls like 'graph describe' here because they are slow
39
+ (each call takes ~35ms) and would be called for every graph on every poll,
40
+ bottlenecking the streaming output.
41
+
42
+ Instead, we use name-based tracking tied to the Stata command execution
43
+ context. The signature is stable within a single command execution but
44
+ changes when a new command starts, allowing us to detect modifications
45
+ between commands without any Stata overhead.
39
46
  """
40
- if not self._stata_client or not hasattr(self._stata_client, "stata"):
41
- return ""
42
- try:
43
- # Use lightweight execution to avoid heavy FS I/O for high-frequency polling
44
- resp = self._stata_client.exec_lightweight(f"graph describe {graph_name}")
45
-
46
- if resp.success and resp.stdout:
47
- return resp.stdout
48
- if resp.error and resp.error.snippet:
49
- # If using lightweight, error might be None or just string in stderr,
50
- # but run_command_structured returns proper error envelope.
51
- return resp.error.snippet
52
- except Exception:
47
+ if not self._stata_client:
53
48
  return ""
54
- return ""
49
+
50
+ # Access command_idx from stata_client if available
51
+ # NOTE: We only use command_idx for the default 'Graph' name to detect
52
+ # modifications. For named graphs, we only detect creation (name change)
53
+ # to avoid triggering redundant notifications for all existing graphs
54
+ # on every command (since command_idx changes globally).
55
+ cmd_idx = getattr(self._stata_client, "_command_idx", 0)
56
+ if graph_name.lower() == "graph":
57
+ return f"{graph_name}_{cmd_idx}"
58
+ return graph_name
55
59
 
56
60
  def _detect_graphs_via_pystata(self) -> List[str]:
57
61
  """Detect newly created graphs using direct pystata state access."""
@@ -103,21 +107,26 @@ class GraphCreationDetector:
103
107
  else:
104
108
  # Fallback to sfi Macro interface - only if stata is available
105
109
  if self._stata_client and hasattr(self._stata_client, 'stata'):
106
- try:
107
- from sfi import Macro
108
- hold_name = f"_mcp_detector_hold_{int(time.time() * 1000 % 1000000)}"
109
- self._stata_client.stata.run(f"capture _return hold {hold_name}", echo=False)
110
+ # Access the lock from client to prevent concurrency issues with pystata
111
+ exec_lock = getattr(self._stata_client, "_exec_lock", None)
112
+ ctx = exec_lock if exec_lock else contextlib.nullcontext()
113
+
114
+ with ctx:
110
115
  try:
111
- self._stata_client.stata.run("macro define mcp_graph_list \"\"", echo=False)
112
- self._stata_client.stata.run("quietly graph dir, memory", echo=False)
113
- self._stata_client.stata.run("macro define mcp_graph_list `r(list)'", echo=False)
114
- graph_list_str = Macro.getGlobal("mcp_graph_list")
115
- finally:
116
- self._stata_client.stata.run(f"capture _return restore {hold_name}", echo=False)
117
- return graph_list_str.split() if graph_list_str else []
118
- except ImportError:
119
- logger.warning("sfi.Macro not available for fallback graph detection")
120
- return []
116
+ from sfi import Macro
117
+ hold_name = f"_mcp_detector_hold_{int(time.time() * 1000 % 1000000)}"
118
+ self._stata_client.stata.run(f"capture _return hold {hold_name}", echo=False)
119
+ try:
120
+ self._stata_client.stata.run("macro define mcp_graph_list \"\"", echo=False)
121
+ self._stata_client.stata.run("quietly graph dir, memory", echo=False)
122
+ self._stata_client.stata.run("macro define mcp_graph_list `r(list)'", echo=False)
123
+ graph_list_str = Macro.getGlobal("mcp_graph_list")
124
+ finally:
125
+ self._stata_client.stata.run(f"capture _return restore {hold_name}", echo=False)
126
+ return graph_list_str.split() if graph_list_str else []
127
+ except ImportError:
128
+ logger.warning("sfi.Macro not available for fallback graph detection")
129
+ return []
121
130
  else:
122
131
  return []
123
132
  except Exception as e:
@@ -256,7 +265,11 @@ class StreamingGraphCache:
256
265
  def __init__(self, stata_client, auto_cache: bool = False):
257
266
  self.stata_client = stata_client
258
267
  self.auto_cache = auto_cache
259
- self.detector = GraphCreationDetector(stata_client)
268
+ # Use persistent detector from client if available, else create local one
269
+ if hasattr(stata_client, "_graph_detector"):
270
+ self.detector = stata_client._graph_detector
271
+ else:
272
+ self.detector = GraphCreationDetector(stata_client)
260
273
  self._lock = threading.Lock()
261
274
  self._cache_callbacks: List[Callable[[str, bool], None]] = []
262
275
  self._graphs_to_cache: List[str] = []
mcp_stata/models.py CHANGED
@@ -55,7 +55,6 @@ class GraphListResponse(BaseModel):
55
55
  class GraphExport(BaseModel):
56
56
  name: str
57
57
  file_path: Optional[str] = None
58
- image_base64: Optional[str] = None
59
58
 
60
59
 
61
60
  class GraphExportResponse(BaseModel):
mcp_stata/server.py CHANGED
@@ -4,6 +4,7 @@ from dataclasses import dataclass
4
4
  from datetime import datetime
5
5
  from importlib.metadata import PackageNotFoundError, version
6
6
  from mcp.server.fastmcp import Context, FastMCP
7
+ from mcp.server.fastmcp.utilities import logging as fastmcp_logging
7
8
  import mcp.types as types
8
9
  from .stata_client import StataClient
9
10
  from .models import (
@@ -27,30 +28,61 @@ from .ui_http import UIChannelManager
27
28
 
28
29
  # Configure logging
29
30
  logger = logging.getLogger("mcp_stata")
31
+ payload_logger = logging.getLogger("mcp_stata.payloads")
32
+ _LOGGING_CONFIGURED = False
30
33
 
31
34
  def setup_logging():
32
- # Configure logging to stderr with immediate flush for MCP transport
35
+ global _LOGGING_CONFIGURED
36
+ if _LOGGING_CONFIGURED:
37
+ return
38
+ _LOGGING_CONFIGURED = True
33
39
  log_level = os.getenv("MCP_STATA_LOGLEVEL", "DEBUG").upper()
34
- configure_root = os.getenv("MCP_STATA_CONFIGURE_LOGGING", "1").lower() not in {"0", "false", "no"}
35
-
36
- # Create a handler that flushes immediately
37
- handler = logging.StreamHandler(sys.stderr)
38
- handler.setLevel(getattr(logging, log_level, logging.DEBUG))
39
- handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
40
-
41
- # Configure root logger only if requested; avoid clobbering existing handlers.
42
- if configure_root:
43
- root_logger = logging.getLogger()
44
- if not root_logger.handlers:
45
- root_logger.addHandler(handler)
46
- root_logger.setLevel(getattr(logging, log_level, logging.DEBUG))
47
-
48
- # Also configure the mcp_stata logger explicitly without duplicating handlers.
40
+ app_handler = logging.StreamHandler(sys.stderr)
41
+ app_handler.setLevel(getattr(logging, log_level, logging.DEBUG))
42
+ app_handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
43
+
44
+ mcp_handler = logging.StreamHandler(sys.stderr)
45
+ mcp_handler.setLevel(getattr(logging, log_level, logging.DEBUG))
46
+ mcp_handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
47
+
48
+ payload_handler = logging.StreamHandler(sys.stderr)
49
+ payload_handler.setLevel(getattr(logging, log_level, logging.DEBUG))
50
+ payload_handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
51
+
52
+ root_logger = logging.getLogger()
53
+ root_logger.handlers = []
54
+ root_logger.setLevel(logging.WARNING)
55
+
56
+ for name, item in logging.root.manager.loggerDict.items():
57
+ if not isinstance(item, logging.Logger):
58
+ continue
59
+ item.handlers = []
60
+ item.propagate = False
61
+ if item.level == logging.NOTSET:
62
+ item.setLevel(getattr(logging, log_level, logging.DEBUG))
63
+
64
+ logger.handlers = [app_handler]
65
+ logger.propagate = False
66
+
67
+ payload_logger.handlers = [payload_handler]
68
+ payload_logger.propagate = False
69
+
70
+ mcp_logger = logging.getLogger("mcp.server")
71
+ mcp_logger.handlers = [mcp_handler]
72
+ mcp_logger.propagate = False
73
+ mcp_logger.setLevel(getattr(logging, log_level, logging.DEBUG))
74
+
75
+ mcp_lowlevel = logging.getLogger("mcp.server.lowlevel.server")
76
+ mcp_lowlevel.handlers = [mcp_handler]
77
+ mcp_lowlevel.propagate = False
78
+ mcp_lowlevel.setLevel(getattr(logging, log_level, logging.DEBUG))
79
+
80
+ mcp_root = logging.getLogger("mcp")
81
+ mcp_root.handlers = [mcp_handler]
82
+ mcp_root.propagate = False
83
+ mcp_root.setLevel(getattr(logging, log_level, logging.DEBUG))
49
84
  if logger.level == logging.NOTSET:
50
85
  logger.setLevel(getattr(logging, log_level, logging.DEBUG))
51
- if not logger.handlers:
52
- logger.addHandler(handler)
53
- logger.propagate = False
54
86
 
55
87
  try:
56
88
  _mcp_stata_version = version("mcp-stata")
@@ -62,6 +94,8 @@ def setup_logging():
62
94
  logger.info("STATA_PATH env at startup: %s", os.getenv("STATA_PATH", "<not set>"))
63
95
  logger.info("LOG_LEVEL: %s", log_level)
64
96
 
97
+
98
+
65
99
  # Initialize FastMCP
66
100
  mcp = FastMCP("mcp_stata")
67
101
  client = StataClient()
@@ -81,6 +115,9 @@ class BackgroundTask:
81
115
 
82
116
 
83
117
  _background_tasks: Dict[str, BackgroundTask] = {}
118
+ _request_log_paths: Dict[str, str] = {}
119
+ _read_log_paths: set[str] = set()
120
+ _read_log_offsets: Dict[str, int] = {}
84
121
 
85
122
 
86
123
  def _register_task(task_info: BackgroundTask, max_tasks: int = 100) -> None:
@@ -124,7 +161,6 @@ async def _notify_task_done(session: object | None, task_info: BackgroundTask, r
124
161
  "error": task_info.error,
125
162
  }
126
163
  try:
127
- _debug_notification("logMessage", payload, request_id)
128
164
  await session.send_log_message(level="info", data=json.dumps(payload), related_request_id=request_id)
129
165
  except Exception:
130
166
  return
@@ -135,7 +171,7 @@ def _debug_notification(kind: str, payload: object, request_id: object | None =
135
171
  serialized = payload if isinstance(payload, str) else json.dumps(payload, ensure_ascii=False)
136
172
  except Exception:
137
173
  serialized = str(payload)
138
- logger.debug("MCP notify %s request_id=%s payload=%s", kind, request_id, serialized)
174
+ payload_logger.info("MCP notify %s request_id=%s payload=%s", kind, request_id, serialized)
139
175
 
140
176
 
141
177
  async def _notify_tool_error(ctx: Context | None, tool_name: str, exc: Exception) -> None:
@@ -157,7 +193,6 @@ async def _notify_tool_error(ctx: Context | None, tool_name: str, exc: Exception
157
193
  if task_id is not None:
158
194
  payload["task_id"] = task_id
159
195
  try:
160
- _debug_notification("logMessage", payload, ctx.request_id)
161
196
  await session.send_log_message(
162
197
  level="error",
163
198
  data=json.dumps(payload),
@@ -173,6 +208,20 @@ def _log_tool_call(tool_name: str, ctx: Context | None = None) -> None:
173
208
  request_id = getattr(ctx, "request_id", None)
174
209
  logger.info("MCP tool call: %s request_id=%s", tool_name, request_id)
175
210
 
211
+ def _should_stream_smcl_chunk(text: str, request_id: object | None) -> bool:
212
+ if request_id is None:
213
+ return True
214
+ try:
215
+ payload = json.loads(text)
216
+ if isinstance(payload, dict) and payload.get("event"):
217
+ return True
218
+ except Exception:
219
+ pass
220
+ log_path = _request_log_paths.get(str(request_id))
221
+ if log_path and log_path in _read_log_paths:
222
+ return False
223
+ return True
224
+
176
225
 
177
226
  def _attach_task_id(ctx: Context | None, task_id: str) -> None:
178
227
  if ctx is None:
@@ -291,12 +340,21 @@ async def run_do_file_background(
291
340
 
292
341
  async def notify_log(text: str) -> None:
293
342
  if session is not None:
343
+ if not _should_stream_smcl_chunk(text, ctx.request_id):
344
+ return
294
345
  _debug_notification("logMessage", text, ctx.request_id)
295
- await session.send_log_message(level="info", data=text, related_request_id=ctx.request_id)
346
+ try:
347
+ await session.send_log_message(level="info", data=text, related_request_id=ctx.request_id)
348
+ except Exception as e:
349
+ logger.warning("Failed to send logMessage notification: %s", e)
350
+ sys.stderr.write(f"[mcp_stata] ERROR: logMessage send failed: {e!r}\n")
351
+ sys.stderr.flush()
296
352
  try:
297
353
  payload = json.loads(text)
298
354
  if isinstance(payload, dict) and payload.get("event") == "log_path":
299
355
  task_info.log_path = payload.get("path")
356
+ if ctx.request_id is not None and task_info.log_path:
357
+ _request_log_paths[str(ctx.request_id)] = task_info.log_path
300
358
  except Exception:
301
359
  return
302
360
 
@@ -334,14 +392,17 @@ async def run_do_file_background(
334
392
  graph_ready_task_id=task_id,
335
393
  graph_ready_format="svg",
336
394
  )
337
- ui_channel.notify_potential_dataset_change()
338
- task_info.result = _format_command_result(result, raw=raw, as_json=as_json)
395
+ # Notify task completion as soon as the core operation is finished
396
+ task_info.done = True
339
397
  if result.error:
340
398
  task_info.error = result.error.message
399
+ await _notify_task_done(session, task_info, request_id)
400
+
401
+ ui_channel.notify_potential_dataset_change()
402
+ task_info.result = _format_command_result(result, raw=raw, as_json=as_json)
341
403
  except Exception as exc: # pragma: no cover - defensive
342
- task_info.error = str(exc)
343
- finally:
344
404
  task_info.done = True
405
+ task_info.error = str(exc)
345
406
  await _notify_task_done(session, task_info, request_id)
346
407
 
347
408
  task_info.task = asyncio.create_task(_run())
@@ -462,12 +523,16 @@ async def run_command_background(
462
523
 
463
524
  async def notify_log(text: str) -> None:
464
525
  if session is not None:
526
+ if not _should_stream_smcl_chunk(text, ctx.request_id):
527
+ return
465
528
  _debug_notification("logMessage", text, ctx.request_id)
466
529
  await session.send_log_message(level="info", data=text, related_request_id=ctx.request_id)
467
530
  try:
468
531
  payload = json.loads(text)
469
532
  if isinstance(payload, dict) and payload.get("event") == "log_path":
470
533
  task_info.log_path = payload.get("path")
534
+ if ctx.request_id is not None and task_info.log_path:
535
+ _request_log_paths[str(ctx.request_id)] = task_info.log_path
471
536
  except Exception:
472
537
  return
473
538
 
@@ -478,11 +543,6 @@ async def run_command_background(
478
543
  async def notify_progress(progress: float, total: float | None, message: str | None) -> None:
479
544
  if session is None or progress_token is None:
480
545
  return
481
- _debug_notification(
482
- "progress",
483
- {"progress": progress, "total": total, "message": message},
484
- ctx.request_id,
485
- )
486
546
  await session.send_progress_notification(
487
547
  progress_token=progress_token,
488
548
  progress=progress,
@@ -505,14 +565,17 @@ async def run_command_background(
505
565
  graph_ready_task_id=task_id,
506
566
  graph_ready_format="svg",
507
567
  )
508
- ui_channel.notify_potential_dataset_change()
509
- task_info.result = _format_command_result(result, raw=raw, as_json=as_json)
568
+ # Notify task completion as soon as the core operation is finished
569
+ task_info.done = True
510
570
  if result.error:
511
571
  task_info.error = result.error.message
572
+ await _notify_task_done(session, task_info, request_id)
573
+
574
+ ui_channel.notify_potential_dataset_change()
575
+ task_info.result = _format_command_result(result, raw=raw, as_json=as_json)
512
576
  except Exception as exc: # pragma: no cover - defensive
513
- task_info.error = str(exc)
514
- finally:
515
577
  task_info.done = True
578
+ task_info.error = str(exc)
516
579
  await _notify_task_done(session, task_info, request_id)
517
580
 
518
581
  task_info.task = asyncio.create_task(_run())
@@ -558,8 +621,17 @@ async def run_command(
558
621
  async def notify_log(text: str) -> None:
559
622
  if session is None:
560
623
  return
624
+ if not _should_stream_smcl_chunk(text, ctx.request_id):
625
+ return
561
626
  _debug_notification("logMessage", text, ctx.request_id)
562
627
  await session.send_log_message(level="info", data=text, related_request_id=ctx.request_id)
628
+ try:
629
+ payload = json.loads(text)
630
+ if isinstance(payload, dict) and payload.get("event") == "log_path":
631
+ if ctx.request_id is not None:
632
+ _request_log_paths[str(ctx.request_id)] = payload.get("path")
633
+ except Exception:
634
+ return
563
635
 
564
636
  progress_token = None
565
637
  if ctx is not None and ctx.request_context.meta is not None:
@@ -568,11 +640,6 @@ async def run_command(
568
640
  async def notify_progress(progress: float, total: float | None, message: str | None) -> None:
569
641
  if session is None or progress_token is None:
570
642
  return
571
- _debug_notification(
572
- "progress",
573
- {"progress": progress, "total": total, "message": message},
574
- ctx.request_id,
575
- )
576
643
  await session.send_progress_notification(
577
644
  progress_token=progress_token,
578
645
  progress=progress,
@@ -627,12 +694,20 @@ def read_log(path: str, offset: int = 0, max_bytes: int = 65536) -> str:
627
694
  Returns a compact JSON string: {"path":..., "offset":..., "next_offset":..., "data":...}
628
695
  """
629
696
  try:
697
+ if path:
698
+ _read_log_paths.add(path)
630
699
  if offset < 0:
631
700
  offset = 0
701
+ if path:
702
+ last_offset = _read_log_offsets.get(path, 0)
703
+ if offset < last_offset:
704
+ offset = last_offset
632
705
  with open(path, "rb") as f:
633
706
  f.seek(offset)
634
707
  data = f.read(max_bytes)
635
708
  next_offset = f.tell()
709
+ if path:
710
+ _read_log_offsets[path] = next_offset
636
711
  text = data.decode("utf-8", errors="replace")
637
712
  return json.dumps({"path": path, "offset": offset, "next_offset": next_offset, "data": text})
638
713
  except FileNotFoundError:
@@ -910,8 +985,16 @@ async def run_do_file(
910
985
  async def notify_log(text: str) -> None:
911
986
  if session is None:
912
987
  return
913
- _debug_notification("logMessage", text, ctx.request_id)
988
+ if not _should_stream_smcl_chunk(text, ctx.request_id):
989
+ return
914
990
  await session.send_log_message(level="info", data=text, related_request_id=ctx.request_id)
991
+ try:
992
+ payload = json.loads(text)
993
+ if isinstance(payload, dict) and payload.get("event") == "log_path":
994
+ if ctx.request_id is not None:
995
+ _request_log_paths[str(ctx.request_id)] = payload.get("path")
996
+ except Exception:
997
+ return
915
998
 
916
999
  progress_token = None
917
1000
  if ctx is not None and ctx.request_context.meta is not None:
@@ -920,11 +1003,6 @@ async def run_do_file(
920
1003
  async def notify_progress(progress: float, total: float | None, message: str | None) -> None:
921
1004
  if session is None or progress_token is None:
922
1005
  return
923
- _debug_notification(
924
- "progress",
925
- {"progress": progress, "total": total, "message": message},
926
- ctx.request_id,
927
- )
928
1006
  await session.send_progress_notification(
929
1007
  progress_token=progress_token,
930
1008
  progress=progress,
@@ -1008,19 +1086,14 @@ def get_stored_results_resource() -> str:
1008
1086
  return json.dumps(client.get_stored_results())
1009
1087
 
1010
1088
  @mcp.tool()
1011
- def export_graphs_all(use_base64: bool = False) -> str:
1089
+ def export_graphs_all() -> str:
1012
1090
  """
1013
- Exports all graphs in memory to file paths (default) or base64-encoded SVGs.
1014
-
1015
- Args:
1016
- use_base64: If True, returns base64-encoded images (token-intensive).
1017
- If False (default), returns file paths to SVG files (token-efficient).
1018
- Use file paths unless you need to embed images directly.
1091
+ Exports all graphs in memory to file paths.
1019
1092
 
1020
- Returns a JSON envelope listing graph names and either file paths or base64 images.
1093
+ Returns a JSON envelope listing graph names and file paths.
1021
1094
  The agent can open SVG files directly to verify visuals (titles/labels/colors/legends).
1022
1095
  """
1023
- exports = client.export_graphs_all(use_base64=use_base64)
1096
+ exports = client.export_graphs_all()
1024
1097
  return exports.model_dump_json(exclude_none=False)
1025
1098
 
1026
1099
  def main():