mcli-framework 7.10.1__py3-none-any.whl → 7.10.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic; see the release advisory for more details.

Files changed (99)
  1. mcli/lib/custom_commands.py +10 -0
  2. mcli/lib/optional_deps.py +240 -0
  3. mcli/workflow/git_commit/ai_service.py +13 -2
  4. mcli/workflow/notebook/converter.py +375 -0
  5. mcli/workflow/notebook/notebook_cmd.py +441 -0
  6. mcli/workflow/notebook/schema.py +402 -0
  7. mcli/workflow/notebook/validator.py +313 -0
  8. mcli/workflow/workflow.py +14 -0
  9. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/METADATA +36 -2
  10. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/RECORD +14 -94
  11. mcli/__init__.py +0 -160
  12. mcli/__main__.py +0 -14
  13. mcli/app/__init__.py +0 -23
  14. mcli/app/model/__init__.py +0 -0
  15. mcli/app/video/__init__.py +0 -5
  16. mcli/chat/__init__.py +0 -34
  17. mcli/lib/__init__.py +0 -0
  18. mcli/lib/api/__init__.py +0 -0
  19. mcli/lib/auth/__init__.py +0 -1
  20. mcli/lib/config/__init__.py +0 -1
  21. mcli/lib/erd/__init__.py +0 -25
  22. mcli/lib/files/__init__.py +0 -0
  23. mcli/lib/fs/__init__.py +0 -1
  24. mcli/lib/logger/__init__.py +0 -3
  25. mcli/lib/performance/__init__.py +0 -17
  26. mcli/lib/pickles/__init__.py +0 -1
  27. mcli/lib/secrets/__init__.py +0 -10
  28. mcli/lib/shell/__init__.py +0 -0
  29. mcli/lib/toml/__init__.py +0 -1
  30. mcli/lib/watcher/__init__.py +0 -0
  31. mcli/ml/__init__.py +0 -16
  32. mcli/ml/api/__init__.py +0 -30
  33. mcli/ml/api/routers/__init__.py +0 -27
  34. mcli/ml/auth/__init__.py +0 -41
  35. mcli/ml/backtesting/__init__.py +0 -33
  36. mcli/ml/cli/__init__.py +0 -5
  37. mcli/ml/config/__init__.py +0 -33
  38. mcli/ml/configs/__init__.py +0 -16
  39. mcli/ml/dashboard/__init__.py +0 -12
  40. mcli/ml/dashboard/components/__init__.py +0 -7
  41. mcli/ml/dashboard/pages/__init__.py +0 -6
  42. mcli/ml/data_ingestion/__init__.py +0 -29
  43. mcli/ml/database/__init__.py +0 -40
  44. mcli/ml/experimentation/__init__.py +0 -29
  45. mcli/ml/features/__init__.py +0 -39
  46. mcli/ml/features/political_features.py +0 -677
  47. mcli/ml/mlops/__init__.py +0 -19
  48. mcli/ml/models/__init__.py +0 -90
  49. mcli/ml/monitoring/__init__.py +0 -25
  50. mcli/ml/optimization/__init__.py +0 -27
  51. mcli/ml/predictions/__init__.py +0 -5
  52. mcli/ml/preprocessing/__init__.py +0 -24
  53. mcli/ml/preprocessing/politician_trading_preprocessor.py +0 -570
  54. mcli/ml/scripts/__init__.py +0 -1
  55. mcli/ml/serving/__init__.py +0 -1
  56. mcli/ml/trading/__init__.py +0 -63
  57. mcli/ml/training/__init__.py +0 -7
  58. mcli/mygroup/__init__.py +0 -3
  59. mcli/public/__init__.py +0 -1
  60. mcli/public/commands/__init__.py +0 -2
  61. mcli/self/__init__.py +0 -3
  62. mcli/workflow/__init__.py +0 -0
  63. mcli/workflow/daemon/__init__.py +0 -15
  64. mcli/workflow/dashboard/__init__.py +0 -5
  65. mcli/workflow/docker/__init__.py +0 -0
  66. mcli/workflow/file/__init__.py +0 -0
  67. mcli/workflow/gcloud/__init__.py +0 -1
  68. mcli/workflow/git_commit/__init__.py +0 -0
  69. mcli/workflow/interview/__init__.py +0 -0
  70. mcli/workflow/politician_trading/__init__.py +0 -4
  71. mcli/workflow/politician_trading/config.py +0 -134
  72. mcli/workflow/politician_trading/connectivity.py +0 -492
  73. mcli/workflow/politician_trading/data_sources.py +0 -654
  74. mcli/workflow/politician_trading/database.py +0 -412
  75. mcli/workflow/politician_trading/demo.py +0 -249
  76. mcli/workflow/politician_trading/models.py +0 -327
  77. mcli/workflow/politician_trading/monitoring.py +0 -413
  78. mcli/workflow/politician_trading/scrapers.py +0 -1074
  79. mcli/workflow/politician_trading/scrapers_california.py +0 -434
  80. mcli/workflow/politician_trading/scrapers_corporate_registry.py +0 -797
  81. mcli/workflow/politician_trading/scrapers_eu.py +0 -376
  82. mcli/workflow/politician_trading/scrapers_free_sources.py +0 -509
  83. mcli/workflow/politician_trading/scrapers_third_party.py +0 -373
  84. mcli/workflow/politician_trading/scrapers_uk.py +0 -378
  85. mcli/workflow/politician_trading/scrapers_us_states.py +0 -471
  86. mcli/workflow/politician_trading/seed_database.py +0 -520
  87. mcli/workflow/politician_trading/supabase_functions.py +0 -354
  88. mcli/workflow/politician_trading/workflow.py +0 -879
  89. mcli/workflow/registry/__init__.py +0 -0
  90. mcli/workflow/repo/__init__.py +0 -0
  91. mcli/workflow/scheduler/__init__.py +0 -25
  92. mcli/workflow/search/__init__.py +0 -0
  93. mcli/workflow/sync/__init__.py +0 -5
  94. mcli/workflow/videos/__init__.py +0 -1
  95. mcli/workflow/wakatime/__init__.py +0 -80
  96. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/WHEEL +0 -0
  97. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/entry_points.txt +0 -0
  98. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/licenses/LICENSE +0 -0
  99. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/top_level.txt +0 -0
@@ -107,15 +107,25 @@ class CustomCommandManager:
107
107
  """
108
108
  Load all custom commands from the commands directory.
109
109
 
110
+ Automatically filters out test commands (starting with 'test_' or 'test-')
111
+ unless MCLI_INCLUDE_TEST_COMMANDS=true is set.
112
+
110
113
  Returns:
111
114
  List of command data dictionaries
112
115
  """
113
116
  commands = []
117
+ include_test = os.environ.get('MCLI_INCLUDE_TEST_COMMANDS', 'false').lower() == 'true'
118
+
114
119
  for command_file in self.commands_dir.glob("*.json"):
115
120
  # Skip the lockfile
116
121
  if command_file.name == "commands.lock.json":
117
122
  continue
118
123
 
124
+ # Skip test commands unless explicitly included
125
+ if not include_test and command_file.stem.startswith(('test_', 'test-')):
126
+ logger.debug(f"Skipping test command: {command_file.name}")
127
+ continue
128
+
119
129
  command_data = self.load_command(command_file)
120
130
  if command_data:
121
131
  commands.append(command_data)
@@ -0,0 +1,240 @@
1
+ """
2
+ Utilities for graceful handling of optional dependencies.
3
+
4
+ This module provides helper functions and decorators to handle optional
5
+ dependencies gracefully, with clear error messages when features are unavailable.
6
+ """
7
+
8
+ import functools
9
+ from typing import Any, Callable, Dict, Optional, Tuple
10
+
11
+ from mcli.lib.logger.logger import get_logger
12
+
13
+ logger = get_logger(__name__)
14
+
15
+
16
class OptionalDependency:
    """
    Container for an optional dependency with availability tracking.

    The import is attempted once, at construction time. Callers can check
    ``available`` before use, call :meth:`require` to get the module or a
    clear ImportError, or access module attributes directly on this wrapper.

    Example:
        >>> ollama = OptionalDependency("ollama")
        >>> if ollama.available:
        ...     client = ollama.module.Client()
    """

    def __init__(
        self,
        module_name: str,
        import_name: Optional[str] = None,
        install_hint: Optional[str] = None,
    ):
        """
        Initialize optional dependency handler.

        Args:
            module_name: Name of the module to import (e.g., "ollama")
            import_name: Alternative import name if different from module_name
            install_hint: Custom installation instruction
        """
        self.module_name = module_name
        self.import_name = import_name or module_name
        self.install_hint = install_hint or f"pip install {module_name}"
        self.module: Optional[Any] = None  # the imported module, when available
        self.available = False
        self.error: Optional[Exception] = None  # the ImportError, when unavailable

        self._try_import()

    def _try_import(self):
        """Attempt to import the module, recording availability and any error."""
        try:
            # importlib.import_module returns the *target* module for dotted
            # paths (e.g. "pkg.sub"), whereas __import__("pkg.sub") returns the
            # top-level package, which would leave self.module pointing at the
            # wrong object.
            import importlib

            self.module = importlib.import_module(self.import_name)
            self.available = True
            logger.debug(f"Optional dependency '{self.module_name}' is available")
        except ImportError as e:
            self.available = False
            self.error = e
            logger.debug(f"Optional dependency '{self.module_name}' is not available: {e}")

    def require(self, feature_name: Optional[str] = None) -> Any:
        """
        Require the dependency to be available, raising an error if not.

        Args:
            feature_name: Name of the feature requiring this dependency

        Returns:
            The imported module

        Raises:
            ImportError: If the dependency is not available
        """
        if not self.available:
            feature_msg = f" for {feature_name}" if feature_name else ""
            raise ImportError(
                f"'{self.module_name}' is required{feature_msg} but not installed.\n"
                f"Install it with: {self.install_hint}"
            )
        return self.module

    def __getattr__(self, name: str) -> Any:
        """Proxy attribute access to the wrapped module.

        Only invoked for names that are not instance attributes set in
        __init__ (Python calls __getattr__ only after normal lookup fails).

        Raises:
            ImportError: If the underlying module is not installed.
        """
        if not self.available:
            raise ImportError(
                f"Cannot access '{name}' from '{self.module_name}' - module not installed.\n"
                f"Install it with: {self.install_hint}"
            )
        return getattr(self.module, name)
89
+
90
+
91
def optional_import(
    module_name: str, import_name: Optional[str] = None, install_hint: Optional[str] = None
) -> Tuple[Optional[Any], bool]:
    """
    Attempt to import an optional dependency without raising.

    Args:
        module_name: Name of the module to import
        import_name: Alternative import name if different from module_name
        install_hint: Custom installation instruction

    Returns:
        A ``(module, available)`` pair; ``module`` is ``None`` when the
        import failed.

    Example:
        >>> ollama, OLLAMA_AVAILABLE = optional_import("ollama")
        >>> if OLLAMA_AVAILABLE:
        ...     client = ollama.Client()
    """
    dependency = OptionalDependency(module_name, import_name, install_hint)
    return dependency.module, dependency.available
112
+
113
+
114
def require_dependency(
    module_name: str, feature_name: str, install_hint: Optional[str] = None
) -> Any:
    """
    Import a dependency that a feature cannot work without.

    Args:
        module_name: Name of the module to import
        feature_name: Name of the feature requiring this dependency
        install_hint: Custom installation instruction

    Returns:
        The imported module

    Raises:
        ImportError: If the dependency is not available (the message names
            the feature and how to install the missing package)

    Example:
        >>> streamlit = require_dependency("streamlit", "dashboard")
    """
    return OptionalDependency(module_name, install_hint=install_hint).require(feature_name)
136
+
137
+
138
def requires(*dependencies: str, install_all_hint: Optional[str] = None):
    """
    Decorator marking a function as needing specific optional dependencies.

    The availability check runs on every call of the wrapped function, so a
    package installed after import time is picked up automatically.

    Args:
        *dependencies: Module names required by the function
        install_all_hint: Custom installation instruction for all dependencies

    Raises:
        ImportError: If any required dependency is not available

    Example:
        >>> @requires("torch", "transformers")
        ... def train_model():
        ...     import torch
        ...     import transformers
        ...     # training code
    """

    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            missing = [
                dep_name
                for dep_name in dependencies
                if not OptionalDependency(dep_name).available
            ]

            if missing:
                hint = install_all_hint or f"pip install {' '.join(missing)}"
                raise ImportError(
                    f"Function '{func.__name__}' requires missing dependencies: {', '.join(missing)}\n"
                    f"Install them with: {hint}"
                )

            return func(*args, **kwargs)

        return wrapper

    return decorator
182
+
183
+
184
# Common optional dependencies registry: module name -> probed dependency.
OPTIONAL_DEPS: Dict[str, OptionalDependency] = {}


def register_optional_dependency(
    module_name: str, import_name: Optional[str] = None, install_hint: Optional[str] = None
) -> OptionalDependency:
    """
    Register and cache an optional dependency.

    The import probe is only performed the first time a given module name is
    registered; later calls return the cached instance.

    Args:
        module_name: Name of the module to import
        import_name: Alternative import name if different from module_name
        install_hint: Custom installation instruction

    Returns:
        OptionalDependency instance
    """
    cached = OPTIONAL_DEPS.get(module_name)
    if cached is None:
        cached = OptionalDependency(module_name, import_name, install_hint)
        OPTIONAL_DEPS[module_name] = cached
    return cached
205
+
206
+
207
def check_dependencies(*module_names: str) -> Dict[str, bool]:
    """
    Check availability of multiple dependencies.

    Args:
        *module_names: Module names to check

    Returns:
        Dictionary mapping module names to availability status

    Example:
        >>> status = check_dependencies("torch", "transformers", "streamlit")
        >>> print(status)
        {'torch': True, 'transformers': False, 'streamlit': True}
    """
    availability: Dict[str, bool] = {}
    for name in module_names:
        availability[name] = OptionalDependency(name).available
    return availability
225
+
226
+
227
# Pre-register frequently used optional packages so callers can consult
# OPTIONAL_DEPS without registering each one individually.
_COMMON_DEPS = {
    "ollama": ("ollama", "pip install ollama"),
    "streamlit": ("streamlit", "pip install streamlit"),
    "torch": ("torch", "pip install torch"),
    "transformers": ("transformers", "pip install transformers"),
    "mlflow": ("mlflow", "pip install mlflow"),
    "plotly": ("plotly", "pip install plotly"),
    "pandas": ("pandas", "pip install pandas"),
    "numpy": ("numpy", "pip install numpy"),
}

for _dep_name, (_dep_import, _dep_hint) in _COMMON_DEPS.items():
    register_optional_dependency(_dep_name, _dep_import, _dep_hint)
@@ -2,11 +2,13 @@ import json
2
2
  import logging
3
3
  from typing import Any, Dict, Optional
4
4
 
5
- import ollama
6
-
7
5
  from mcli.lib.logger.logger import get_logger
6
+ from mcli.lib.optional_deps import optional_import
8
7
  from mcli.lib.toml.toml import read_from_toml
9
8
 
9
+ # Gracefully handle optional ollama dependency
10
+ ollama, OLLAMA_AVAILABLE = optional_import("ollama")
11
+
10
12
  logger = get_logger(__name__)
11
13
 
12
14
 
@@ -204,6 +206,15 @@ Generate ONLY the commit message, nothing else:"""
204
206
  def generate_commit_message(self, changes: Dict[str, Any], diff_content: str) -> str:
205
207
  """Generate an AI-powered commit message"""
206
208
  try:
209
+ # Check if ollama is available
210
+ if not OLLAMA_AVAILABLE:
211
+ logger.warning(
212
+ "Ollama is not installed. Install it with: pip install ollama\n"
213
+ "Falling back to rule-based commit message generation."
214
+ )
215
+ analysis = self._analyze_file_patterns(changes)
216
+ return self._generate_fallback_message(changes, analysis)
217
+
207
218
  # Analyze the changes first
208
219
  analysis = self._analyze_file_patterns(changes)
209
220
 
@@ -0,0 +1,375 @@
1
+ """
2
+ Converter for transforming between MCLI workflow JSON and notebook format.
3
+
4
+ This module provides bidirectional conversion between:
5
+ 1. Legacy MCLI workflow JSON format (single code field)
6
+ 2. New Jupyter-compatible notebook format (multi-cell)
7
+ """
8
+
9
import json
import re
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
14
+
15
+ from mcli.lib.logger.logger import get_logger
16
+
17
+ from .schema import (
18
+ CellLanguage,
19
+ CellType,
20
+ MCLIMetadata,
21
+ NotebookCell,
22
+ NotebookMetadata,
23
+ WorkflowNotebook,
24
+ )
25
+
26
+ logger = get_logger()
27
+
28
+
29
+ class WorkflowConverter:
30
+ """Convert between workflow JSON and notebook formats."""
31
+
32
+ @staticmethod
33
+ def _split_code_into_cells(code: str, language: str = "python") -> List[NotebookCell]:
34
+ """
35
+ Split a monolithic code block into logical cells.
36
+
37
+ This attempts to intelligently split code based on:
38
+ - Comment markers like # %% or # CELL
39
+ - Function/class definitions
40
+ - Major logical blocks
41
+ """
42
+ cells = []
43
+
44
+ # First, try to split by cell markers (VSCode/Jupyter style)
45
+ cell_marker_pattern = r"^#\s*%%|^#\s*<cell>|^#\s*CELL"
46
+ segments = re.split(cell_marker_pattern, code, flags=re.MULTILINE)
47
+
48
+ if len(segments) > 1:
49
+ # Found cell markers
50
+ for i, segment in enumerate(segments):
51
+ if segment.strip():
52
+ cells.append(
53
+ NotebookCell(
54
+ cell_type=CellType.CODE,
55
+ source=segment.strip() + "\n",
56
+ metadata={"language": language},
57
+ )
58
+ )
59
+ else:
60
+ # No cell markers, try to split intelligently by blank lines or major blocks
61
+ lines = code.split("\n")
62
+ current_cell_lines = []
63
+
64
+ for i, line in enumerate(lines):
65
+ current_cell_lines.append(line)
66
+
67
+ # Split on double blank lines or before major definitions
68
+ next_line = lines[i + 1] if i + 1 < len(lines) else ""
69
+ is_double_blank = line.strip() == "" and next_line.strip() == ""
70
+ is_major_def = (
71
+ next_line.strip().startswith("def ")
72
+ or next_line.strip().startswith("class ")
73
+ or next_line.strip().startswith("@")
74
+ )
75
+
76
+ if (is_double_blank or is_major_def) and len(current_cell_lines) > 3:
77
+ cell_code = "\n".join(current_cell_lines).strip()
78
+ if cell_code:
79
+ cells.append(
80
+ NotebookCell(
81
+ cell_type=CellType.CODE,
82
+ source=cell_code + "\n",
83
+ metadata={"language": language},
84
+ )
85
+ )
86
+ current_cell_lines = []
87
+
88
+ # Add remaining lines as final cell
89
+ if current_cell_lines:
90
+ cell_code = "\n".join(current_cell_lines).strip()
91
+ if cell_code:
92
+ cells.append(
93
+ NotebookCell(
94
+ cell_type=CellType.CODE,
95
+ source=cell_code + "\n",
96
+ metadata={"language": language},
97
+ )
98
+ )
99
+
100
+ # If no cells were created, add the entire code as one cell
101
+ if not cells and code.strip():
102
+ cells.append(
103
+ NotebookCell(
104
+ cell_type=CellType.CODE,
105
+ source=code,
106
+ metadata={"language": language},
107
+ )
108
+ )
109
+
110
+ return cells
111
+
112
+ @classmethod
113
+ def workflow_to_notebook(
114
+ cls, workflow_data: Dict[str, Any], add_description: bool = True
115
+ ) -> WorkflowNotebook:
116
+ """
117
+ Convert legacy workflow JSON to notebook format.
118
+
119
+ Args:
120
+ workflow_data: Legacy workflow JSON data
121
+ add_description: Add description as markdown cell
122
+
123
+ Returns:
124
+ WorkflowNotebook instance
125
+ """
126
+ # Extract metadata
127
+ name = workflow_data.get("name", "untitled")
128
+ description = workflow_data.get("description", "")
129
+ group = workflow_data.get("group")
130
+ version = workflow_data.get("version", "1.0")
131
+ language = workflow_data.get("language", "python")
132
+ created_at = workflow_data.get("created_at")
133
+ updated_at = workflow_data.get("updated_at")
134
+ extra_metadata = workflow_data.get("metadata", {})
135
+
136
+ # Create MCLI metadata
137
+ mcli_metadata = MCLIMetadata(
138
+ name=name,
139
+ description=description,
140
+ group=group,
141
+ version=version,
142
+ language=CellLanguage(language),
143
+ created_at=created_at,
144
+ updated_at=updated_at,
145
+ extra=extra_metadata,
146
+ )
147
+
148
+ # Create notebook metadata
149
+ notebook_metadata = NotebookMetadata(mcli=mcli_metadata)
150
+
151
+ # Create notebook
152
+ notebook = WorkflowNotebook(metadata=notebook_metadata)
153
+
154
+ # Add description as markdown cell if present
155
+ if add_description and description:
156
+ notebook.add_markdown_cell(f"# {name}\n\n{description}")
157
+
158
+ # Extract and split code into cells
159
+ code = workflow_data.get("code", "")
160
+ if code:
161
+ cells = cls._split_code_into_cells(code, language)
162
+ notebook.cells.extend(cells)
163
+
164
+ return notebook
165
+
166
+ @staticmethod
167
+ def notebook_to_workflow(notebook: WorkflowNotebook) -> Dict[str, Any]:
168
+ """
169
+ Convert notebook format to legacy workflow JSON.
170
+
171
+ Args:
172
+ notebook: WorkflowNotebook instance
173
+
174
+ Returns:
175
+ Legacy workflow JSON data
176
+ """
177
+ mcli_meta = notebook.metadata.mcli
178
+
179
+ # Combine all code cells into single code field
180
+ code_parts = []
181
+ for cell in notebook.cells:
182
+ if cell.cell_type == CellType.CODE:
183
+ code_parts.append(cell.source_text)
184
+
185
+ # Join with cell markers for potential round-trip conversion
186
+ combined_code = "\n# %%\n".join(code_parts)
187
+
188
+ # Build workflow data
189
+ workflow_data = {
190
+ "name": mcli_meta.name,
191
+ "description": mcli_meta.description,
192
+ "version": mcli_meta.version,
193
+ "language": mcli_meta.language.value,
194
+ "code": combined_code,
195
+ }
196
+
197
+ # Add optional fields
198
+ if mcli_meta.group:
199
+ workflow_data["group"] = mcli_meta.group
200
+ if mcli_meta.created_at:
201
+ workflow_data["created_at"] = mcli_meta.created_at
202
+ if mcli_meta.updated_at:
203
+ workflow_data["updated_at"] = mcli_meta.updated_at
204
+ else:
205
+ workflow_data["updated_at"] = datetime.utcnow().isoformat() + "Z"
206
+
207
+ if mcli_meta.extra:
208
+ workflow_data["metadata"] = mcli_meta.extra
209
+
210
+ return workflow_data
211
+
212
+ @classmethod
213
+ def load_workflow_json(cls, path: Union[str, Path]) -> Dict[str, Any]:
214
+ """Load workflow JSON from file."""
215
+ path = Path(path)
216
+ with open(path, "r") as f:
217
+ return json.load(f)
218
+
219
+ @classmethod
220
+ def save_workflow_json(cls, data: Dict[str, Any], path: Union[str, Path]) -> None:
221
+ """Save workflow JSON to file."""
222
+ path = Path(path)
223
+ with open(path, "w") as f:
224
+ json.dump(data, f, indent=2)
225
+
226
+ @classmethod
227
+ def load_notebook_json(cls, path: Union[str, Path]) -> WorkflowNotebook:
228
+ """Load notebook from JSON file."""
229
+ path = Path(path)
230
+ with open(path, "r") as f:
231
+ data = json.load(f)
232
+
233
+ # Check if it's a notebook or legacy workflow format
234
+ if "nbformat" in data:
235
+ # It's already a notebook
236
+ return WorkflowNotebook.from_dict(data)
237
+ else:
238
+ # It's a legacy workflow, convert it
239
+ logger.info(f"Converting legacy workflow to notebook format: {path}")
240
+ return cls.workflow_to_notebook(data)
241
+
242
+ @classmethod
243
+ def save_notebook_json(cls, notebook: WorkflowNotebook, path: Union[str, Path]) -> None:
244
+ """Save notebook to JSON file."""
245
+ path = Path(path)
246
+ data = notebook.to_dict()
247
+ with open(path, "w") as f:
248
+ json.dump(data, f, indent=2)
249
+
250
+ @classmethod
251
+ def convert_file_to_notebook(
252
+ cls, input_path: Union[str, Path], output_path: Optional[Union[str, Path]] = None
253
+ ) -> Path:
254
+ """
255
+ Convert a workflow JSON file to notebook format.
256
+
257
+ Args:
258
+ input_path: Path to legacy workflow JSON
259
+ output_path: Optional output path (defaults to same path)
260
+
261
+ Returns:
262
+ Path to the converted notebook file
263
+ """
264
+ input_path = Path(input_path)
265
+ output_path = Path(output_path) if output_path else input_path
266
+
267
+ # Load legacy workflow
268
+ workflow_data = cls.load_workflow_json(input_path)
269
+
270
+ # Convert to notebook
271
+ notebook = cls.workflow_to_notebook(workflow_data)
272
+
273
+ # Save notebook
274
+ cls.save_notebook_json(notebook, output_path)
275
+
276
+ logger.info(f"Converted {input_path} to notebook format at {output_path}")
277
+ return output_path
278
+
279
+ @classmethod
280
+ def convert_file_to_workflow(
281
+ cls, input_path: Union[str, Path], output_path: Optional[Union[str, Path]] = None
282
+ ) -> Path:
283
+ """
284
+ Convert a notebook file to legacy workflow JSON format.
285
+
286
+ Args:
287
+ input_path: Path to notebook JSON
288
+ output_path: Optional output path (defaults to same path)
289
+
290
+ Returns:
291
+ Path to the converted workflow file
292
+ """
293
+ input_path = Path(input_path)
294
+ output_path = Path(output_path) if output_path else input_path
295
+
296
+ # Load notebook
297
+ notebook = cls.load_notebook_json(input_path)
298
+
299
+ # Convert to workflow
300
+ workflow_data = cls.notebook_to_workflow(notebook)
301
+
302
+ # Save workflow
303
+ cls.save_workflow_json(workflow_data, output_path)
304
+
305
+ logger.info(f"Converted {input_path} to workflow format at {output_path}")
306
+ return output_path
307
+
308
+ @classmethod
309
+ def migrate_directory(
310
+ cls, directory: Union[str, Path], backup: bool = True, in_place: bool = True
311
+ ) -> Dict[str, Any]:
312
+ """
313
+ Migrate all workflow JSON files in a directory to notebook format.
314
+
315
+ Args:
316
+ directory: Directory containing workflow JSON files
317
+ backup: Create backup files before conversion
318
+ in_place: Convert files in place (vs creating new files)
319
+
320
+ Returns:
321
+ Dictionary with migration results
322
+ """
323
+ directory = Path(directory)
324
+ results = {
325
+ "total": 0,
326
+ "converted": 0,
327
+ "failed": 0,
328
+ "skipped": 0,
329
+ "files": [],
330
+ }
331
+
332
+ for json_file in directory.glob("*.json"):
333
+ # Skip lockfile and already-converted notebooks
334
+ if json_file.name == "commands.lock.json":
335
+ continue
336
+
337
+ try:
338
+ # Load and check if already a notebook
339
+ with open(json_file, "r") as f:
340
+ data = json.load(f)
341
+
342
+ results["total"] += 1
343
+
344
+ if "nbformat" in data:
345
+ # Already a notebook
346
+ results["skipped"] += 1
347
+ logger.debug(f"Skipping {json_file.name} - already a notebook")
348
+ continue
349
+
350
+ # Backup if requested
351
+ if backup:
352
+ backup_path = json_file.with_suffix(".json.bak")
353
+ cls.save_workflow_json(data, backup_path)
354
+ logger.debug(f"Created backup: {backup_path}")
355
+
356
+ # Convert to notebook
357
+ if in_place:
358
+ output_path = json_file
359
+ else:
360
+ output_path = json_file.with_stem(f"{json_file.stem}.notebook")
361
+
362
+ cls.convert_file_to_notebook(json_file, output_path)
363
+
364
+ results["converted"] += 1
365
+ results["files"].append(str(json_file))
366
+
367
+ except Exception as e:
368
+ logger.error(f"Failed to convert {json_file}: {e}")
369
+ results["failed"] += 1
370
+
371
+ logger.info(
372
+ f"Migration complete: {results['converted']} converted, "
373
+ f"{results['skipped']} skipped, {results['failed']} failed"
374
+ )
375
+ return results