more-compute 0.3.2__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
morecompute/notebook.py CHANGED
@@ -1,6 +1,8 @@
 import json
+from pathlib import Path
 from typing import List, Dict, Any
 from uuid import uuid4
+from .utils.py_percent_parser import parse_py_percent, generate_py_percent
 
 class Notebook:
     """Manages the state of a notebook's cells."""
@@ -71,6 +73,8 @@ class Notebook:
             if cell.get('cell_type') == 'code':
                 cell['outputs'] = []
                 cell['execution_count'] = None
+                cell['execution_time'] = None
+                cell['error'] = None
 
     def to_json(self) -> str:
         # Basic notebook format
@@ -84,10 +88,18 @@ class Notebook:
 
     def load_from_file(self, file_path: str):
         try:
-            with open(file_path, 'r') as f:
-                data = json.load(f)
+            path = Path(file_path)
+
+            # Check file extension
+            if path.suffix == '.py':
+                # Load .py file with py:percent format
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    content = f.read()
+
+                data = parse_py_percent(content)
                 loaded_cells = data.get('cells', [])
-            # Ensure stable IDs for all cells (back-compat for notebooks without IDs)
+
+                # Ensure stable IDs for all cells
                 self.cells = []
                 for cell in loaded_cells:
                     if not isinstance(cell, dict):
@@ -95,9 +107,40 @@ class Notebook:
                     if 'id' not in cell or not cell['id']:
                         cell['id'] = self._generate_cell_id()
                     self.cells.append(cell)
+
                 self.metadata = data.get('metadata', {})
                 self.file_path = file_path
-        except (FileNotFoundError, json.JSONDecodeError) as e:
+
+            elif path.suffix == '.ipynb':
+                # Block .ipynb files with helpful error
+                raise ValueError(
+                    f"MoreCompute only supports .py notebooks.\n\n"
+                    f"Convert your notebook with:\n"
+                    f" more-compute convert {path.name} -o {path.stem}.py\n\n"
+                    f"Then open with:\n"
+                    f" more-compute {path.stem}.py"
+                )
+
+            else:
+                raise ValueError(f"Unsupported file format: {path.suffix}. Use .py files.")
+
+        except FileNotFoundError as e:
+            print(f"Error: File not found: {e}")
+            # Initialize with a default cell if loading fails
+            self.cells = [{
+                'id': self._generate_cell_id(),
+                'cell_type': 'code',
+                'source': '',
+                'metadata': {},
+                'outputs': [],
+                'execution_count': None
+            }]
+            self.metadata = {}
+            self.file_path = file_path
+        except ValueError as e:
+            # Re-raise validation errors (like .ipynb block)
+            raise
+        except Exception as e:
             print(f"Error loading notebook: {e}")
             # Initialize with a default cell if loading fails
             self.cells = [{
@@ -116,8 +159,24 @@ class Notebook:
         if not path_to_save:
             raise ValueError("No file path specified for saving.")
 
-        with open(path_to_save, 'w') as f:
-            f.write(self.to_json())
+        path = Path(path_to_save)
+
+        # Save in appropriate format based on extension
+        if path.suffix == '.py':
+            # Save as py:percent format
+            content = generate_py_percent(self.cells)
+            with open(path_to_save, 'w', encoding='utf-8') as f:
+                f.write(content)
+        elif path.suffix == '.ipynb':
+            # Block saving as .ipynb
+            raise ValueError("MoreCompute only supports .py notebooks. Use .py extension.")
+        else:
+            # Default to .py if no extension
+            path_to_save = str(path.with_suffix('.py'))
+            content = generate_py_percent(self.cells)
+            with open(path_to_save, 'w', encoding='utf-8') as f:
+                f.write(content)
+
         self.file_path = path_to_save
 
     def _generate_cell_id(self) -> str:
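
The load/save paths above now round-trip notebooks through parse_py_percent and generate_py_percent instead of JSON. The parser itself is not part of this diff, but py:percent is the well-known cell-marker convention used by Jupytext and VS Code, so a saved .py notebook presumably looks something like this (contents illustrative):

    # %% [markdown]
    # # Analysis notes

    # %%
    import math
    print(math.pi)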
morecompute/server.py CHANGED
@@ -18,7 +18,7 @@ from .utils.system_environment_util import DeviceMetrics
 from .utils.error_utils import ErrorUtils
 from .utils.cache_util import make_cache_key
 from .utils.notebook_util import coerce_cell_source
-from .utils.config_util import load_api_key_from_env, save_api_key_to_env
+from .utils.config_util import load_api_key, save_api_key
 from .utils.zmq_util import reconnect_zmq_sockets, reset_to_local_zmq
 from .services.prime_intellect import PrimeIntellectService
 from .services.pod_manager import PodKernelManager
@@ -67,7 +67,7 @@ else:
 error_utils = ErrorUtils()
 executor = NextZmqExecutor(error_utils=error_utils)
 metrics = DeviceMetrics()
-prime_api_key = load_api_key_from_env("PRIME_INTELLECT_API_KEY", BASE_DIR / ".env")
+prime_api_key = load_api_key("PRIME_INTELLECT_API_KEY")
 prime_intellect = PrimeIntellectService(api_key=prime_api_key) if prime_api_key else None
 pod_manager: PodKernelManager | None = None
 data_manager = DataManager(prime_intellect=prime_intellect)
@@ -99,7 +99,23 @@ async def startup_event():
 @app.on_event("shutdown")
 async def shutdown_event():
     """Cleanup services on shutdown."""
-    global lsp_service
+    global lsp_service, executor
+
+    # Shutdown executor and worker process
+    if executor and executor.worker_proc:
+        try:
+            print("[EXECUTOR] Shutting down worker process...", file=sys.stderr, flush=True)
+            executor.worker_proc.terminate()
+            executor.worker_proc.wait(timeout=2)
+            print("[EXECUTOR] Worker process shutdown complete", file=sys.stderr, flush=True)
+        except Exception as e:
+            print(f"[EXECUTOR] Error during worker shutdown, forcing kill: {e}", file=sys.stderr, flush=True)
+            try:
+                executor.worker_proc.kill()
+            except Exception:
+                pass
+
+    # Shutdown LSP service
     if lsp_service:
         try:
             await lsp_service.shutdown()
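
The terminate-then-kill sequence added to shutdown_event is the usual graceful-shutdown escalation: ask with SIGTERM, wait briefly, then force SIGKILL. In isolation the pattern looks like this (a generic sketch, not this package's executor):

    import subprocess

    proc = subprocess.Popen(["sleep", "60"])
    proc.terminate()             # polite request (SIGTERM)
    try:
        proc.wait(timeout=2)     # give it a moment to exit cleanly
    except subprocess.TimeoutExpired:
        proc.kill()              # escalate (SIGKILL)
        proc.wait()              # reap the child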
@@ -441,12 +457,34 @@ class WebSocketManager:
 
     async def handle_message_loop(self, websocket: WebSocket):
         """Main loop to handle incoming WebSocket messages."""
+        tasks = set()
+
+        def task_done_callback(task):
+            tasks.discard(task)
+            # Check for exceptions in completed tasks
+            try:
+                exc = task.exception()
+                if exc:
+                    print(f"[SERVER] Task raised exception: {exc}", file=sys.stderr, flush=True)
+                    import traceback
+                    traceback.print_exception(type(exc), exc, exc.__traceback__)
+            except asyncio.CancelledError:
+                pass
+            except Exception as e:
+                print(f"[SERVER] Error in task_done_callback: {e}", file=sys.stderr, flush=True)
+
         while True:
             try:
                 message = await websocket.receive_json()
-                await self._handle_message(websocket, message)
+                # Process messages concurrently so interrupts can arrive during execution
+                task = asyncio.create_task(self._handle_message(websocket, message))
+                tasks.add(task)
+                task.add_done_callback(task_done_callback)
             except WebSocketDisconnect:
                 self.disconnect(websocket)
+                # Cancel all pending tasks
+                for task in tasks:
+                    task.cancel()
                 break
             except Exception as e:
                 await self._send_error(websocket, f"Unhandled error: {e}")
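
Scheduling each message with asyncio.create_task instead of awaiting it inline is what lets an interrupt message be handled while a long cell execution is still in flight; the done-callback keeps the task set from growing and surfaces exceptions that fire-and-forget tasks would otherwise swallow. A minimal, runnable sketch of the same pattern (handler names are illustrative, not this package's API):

    import asyncio

    async def handle(msg: str) -> None:
        if msg == "execute":
            await asyncio.sleep(1)          # stands in for a long-running cell
            print("execution finished")
        elif msg == "interrupt":
            print("interrupt handled while execution is still running")

    async def message_loop(messages: list[str]) -> None:
        tasks: set[asyncio.Task] = set()
        for msg in messages:
            task = asyncio.create_task(handle(msg))  # schedule, don't block
            tasks.add(task)
            task.add_done_callback(tasks.discard)
        await asyncio.gather(*tasks)

    # "interrupt" is handled before "execute" finishes sleeping
    asyncio.run(message_loop(["execute", "interrupt"]))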
@@ -534,12 +572,24 @@ class WebSocketManager:
         else:
             # Normal add cell
             self.notebook.add_cell(index=index, cell_type=cell_type, source=source)
+
+        # Save the notebook after adding cell
+        try:
+            self.notebook.save_to_file()
+        except Exception as e:
+            print(f"Warning: Failed to save notebook after adding cell: {e}", file=sys.stderr)
+
         await self.broadcast_notebook_update()
 
     async def _handle_delete_cell(self, websocket: WebSocket, data: dict):
         index = data.get('cell_index')
         if index is not None:
             self.notebook.delete_cell(index)
+            # Save the notebook after deleting cell
+            try:
+                self.notebook.save_to_file()
+            except Exception as e:
+                print(f"Warning: Failed to save notebook after deleting cell: {e}", file=sys.stderr)
         await self.broadcast_notebook_update()
 
     async def _handle_update_cell(self, websocket: WebSocket, data: dict):
@@ -587,49 +637,30 @@ class WebSocketManager:
         print(f"[SERVER] Interrupt request received for cell {cell_index}", file=sys.stderr, flush=True)
 
         # Perform the interrupt (this may take up to 1 second)
+        # The execution handler will send the appropriate error and completion messages
         await self.executor.interrupt_kernel(cell_index=cell_index)
 
-        print(f"[SERVER] Interrupt completed, sending error message", file=sys.stderr, flush=True)
+        print(f"[SERVER] Interrupt completed, execution handler will send completion messages", file=sys.stderr, flush=True)
 
-        # Inform all clients that the currently running cell (if any) is interrupted
-        try:
-            await websocket.send_json({
-                "type": "execution_error",
-                "data": {
-                    "cell_index": cell_index,
-                    "error": {
-                        "output_type": "error",
-                        "ename": "KeyboardInterrupt",
-                        "evalue": "Execution interrupted by user",
-                        "traceback": ["KeyboardInterrupt: Execution was stopped by user"]
-                    }
-                }
-            })
-            await websocket.send_json({
-                "type": "execution_complete",
-                "data": {
-                    "cell_index": cell_index,
-                    "result": {
-                        "status": "error",
-                        "execution_count": None,
-                        "execution_time": "interrupted",
-                        "outputs": [],
-                        "error": {
-                            "output_type": "error",
-                            "ename": "KeyboardInterrupt",
-                            "evalue": "Execution interrupted by user",
-                            "traceback": ["KeyboardInterrupt: Execution was stopped by user"]
-                        }
-                    }
-                }
-            })
-            print(f"[SERVER] Error messages sent for cell {cell_index}", file=sys.stderr, flush=True)
-        except Exception as e:
-            print(f"[SERVER] Failed to send error messages: {e}", file=sys.stderr, flush=True)
+        # Note: We don't send completion messages here anymore because:
+        # 1. For shell commands: AsyncSpecialCommandHandler._execute_shell_command sends them
+        # 2. For Python code: The worker sends them
+        # Sending duplicate messages causes the frontend to get confused
 
     async def _handle_reset_kernel(self, websocket: WebSocket, data: dict):
+        import sys
+        print(f"[SERVER] Resetting kernel", file=sys.stderr, flush=True)
         self.executor.reset_kernel()
         self.notebook.clear_all_outputs()
+
+        # Note: We don't save the notebook here - this preserves execution times
+        # from the last session, which is useful for seeing how long things took
+
+        # Broadcast kernel restart to all clients
+        await self.broadcast_pod_update({
+            "type": "kernel_restarted",
+            "data": {}
+        })
         await self.broadcast_notebook_update()
 
     async def _send_error(self, websocket: WebSocket, error_message: str):
@@ -654,14 +685,14 @@ async def get_gpu_config() -> ConfigStatusResponse:
 
 @app.post("/api/gpu/config", response_model=ApiKeyResponse)
 async def set_gpu_config(request: ApiKeyRequest) -> ApiKeyResponse:
-    """Save Prime Intellect API key to .env file and reinitialize service."""
+    """Save Prime Intellect API key to user config (~/.morecompute/config.json) and reinitialize service."""
     global prime_intellect, pod_monitor
 
     if not request.api_key.strip():
         raise HTTPException(status_code=400, detail="API key is required")
 
     try:
-        save_api_key_to_env("PRIME_INTELLECT_API_KEY", request.api_key, BASE_DIR / ".env")
+        save_api_key("PRIME_INTELLECT_API_KEY", request.api_key)
         prime_intellect = PrimeIntellectService(api_key=request.api_key)
         if prime_intellect:
             pod_monitor = PodMonitor(
@@ -811,9 +842,10 @@ async def _connect_to_pod_background(pod_id: str):
             reconnect_zmq_sockets(
                 executor,
                 cmd_addr=addresses["cmd_addr"],
-                pub_addr=addresses["pub_addr"]
+                pub_addr=addresses["pub_addr"],
+                is_remote=True  # Critical: Tell executor this is a remote worker
             )
-            print(f"[CONNECT BACKGROUND] Successfully connected to pod {pod_id}", file=sys.stderr, flush=True)
+            print(f"[CONNECT BACKGROUND] Successfully connected to pod {pod_id}, executor.is_remote=True", file=sys.stderr, flush=True)
         else:
             # Connection failed - clean up
             print(f"[CONNECT BACKGROUND] Failed to connect: {result}", file=sys.stderr, flush=True)
morecompute/services/pod_manager.py CHANGED
@@ -408,8 +408,8 @@ class PodKernelManager:
             f"'cd /tmp && "
             f"MC_ZMQ_CMD_ADDR=tcp://0.0.0.0:{self.remote_cmd_port} "
             f"MC_ZMQ_PUB_ADDR=tcp://0.0.0.0:{self.remote_pub_port} "
-            f"nohup python3 /tmp/morecompute/execution/worker.py "
-            f">/tmp/worker.log 2>&1 </dev/null & "
+            f"setsid python3 -u /tmp/morecompute/execution/worker.py "
+            f"</dev/null >/tmp/worker.log 2>&1 & "
             f"echo $!'"
         )
     ])
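
Replacing nohup with setsid starts the remote worker in a brand-new session, so it has no controlling terminal and never receives the SIGHUP sent when the launching SSH connection closes; -u additionally unbuffers Python's stdout/stderr so /tmp/worker.log fills in real time. The Python-level equivalent of setsid, for comparison, is start_new_session=True (a generic sketch, not this package's launch code):

    import subprocess

    # start_new_session=True makes the child call setsid(), detaching it
    # from the parent's session so a disconnect-time SIGHUP never reaches it.
    with open("/tmp/worker.log", "w") as log:
        proc = subprocess.Popen(
            ["python3", "-u", "worker.py"],  # hypothetical worker script
            stdin=subprocess.DEVNULL,
            stdout=log,
            stderr=subprocess.STDOUT,
            start_new_session=True,
        )
    print(f"worker pid: {proc.pid}")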
@@ -248,6 +248,11 @@ class CellMagicHandlers:
                 env=env
             )
 
+            # Track process for interrupt handling
+            if hasattr(cell_magic_handler, 'special_handler'):
+                cell_magic_handler.special_handler.current_process_sync = process
+                print(f"[CELL_MAGIC] Tracking sync subprocess PID={process.pid}", file=sys.stderr, flush=True)
+
             # Read and print output line by line (real-time streaming)
             def read_stream(stream, output_type):
                 """Read stream line by line and print immediately"""
@@ -276,12 +281,38 @@ class CellMagicHandlers:
             stdout_thread.start()
             stderr_thread.start()
 
-            # Wait for process to complete
-            return_code = process.wait()
+            # Wait for process to complete, checking if it was killed
+            try:
+                # Poll with timeout to detect if process was killed externally
+                while process.poll() is None:
+                    try:
+                        process.wait(timeout=0.1)
+                    except subprocess.TimeoutExpired:
+                        # Check if interrupted
+                        if hasattr(cell_magic_handler, 'special_handler'):
+                            if cell_magic_handler.special_handler.sync_interrupted:
+                                # Process was killed by interrupt handler
+                                print(f"[CELL_MAGIC] Process was interrupted, raising KeyboardInterrupt", file=sys.stderr, flush=True)
+                                raise KeyboardInterrupt("Execution interrupted by user")
+
+                return_code = process.returncode
+            except KeyboardInterrupt:
+                # Kill process if KeyboardInterrupt
+                try:
+                    process.kill()
+                    process.wait()
+                except Exception:
+                    pass
+                raise
 
             # Wait for output threads to finish
-            stdout_thread.join()
-            stderr_thread.join()
+            stdout_thread.join(timeout=1)
+            stderr_thread.join(timeout=1)
+
+            # Clear process reference
+            if hasattr(cell_magic_handler, 'special_handler'):
+                cell_magic_handler.special_handler.current_process_sync = None
+                print(f"[CELL_MAGIC] Cleared sync subprocess reference", file=sys.stderr, flush=True)
 
             return return_code
 
morecompute/utils/config_util.py CHANGED
@@ -1,45 +1,67 @@
 """Configuration utilities for managing API keys and environment variables."""
 
 from pathlib import Path
+from typing import Optional
 import os
+import json
 
 
-def load_api_key_from_env(env_var: str, env_file_path: Path | None = None) -> str | None:
+# Global config directory in user's home
+CONFIG_DIR = Path.home() / ".morecompute"
+CONFIG_FILE = CONFIG_DIR / "config.json"
+
+
+def _ensure_config_dir() -> None:
+    """Ensure the config directory exists."""
+    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
+
+
+def _load_config() -> dict:
+    """Load config from JSON file."""
+    if not CONFIG_FILE.exists():
+        return {}
+    try:
+        with CONFIG_FILE.open("r", encoding="utf-8") as f:
+            return json.load(f)
+    except (json.JSONDecodeError, IOError):
+        return {}
+
+
+def _save_config(config: dict) -> None:
+    """Save config to JSON file."""
+    _ensure_config_dir()
+    with CONFIG_FILE.open("w", encoding="utf-8") as f:
+        json.dump(config, f, indent=2)
+
+
+def load_api_key(key_name: str) -> Optional[str]:
     """
-    Load API key from environment variable or .env file.
+    Load API key from user config directory (~/.morecompute/config.json).
+    Falls back to environment variable if not found in config.
 
     Args:
-        env_var: Environment variable name to check
-        env_file_path: Path to .env file (optional)
+        key_name: Key name (e.g., "PRIME_INTELLECT_API_KEY")
 
     Returns:
         API key string or None if not found
     """
-    api_key = os.getenv(env_var)
-    if api_key:
-        return api_key
+    # Check environment variable first
+    env_key = os.getenv(key_name)
+    if env_key:
+        return env_key
 
-    if env_file_path and env_file_path.exists():
-        try:
-            with env_file_path.open("r", encoding="utf-8") as f:
-                for line in f:
-                    line = line.strip()
-                    if line.startswith(f"{env_var}="):
-                        return line.split("=", 1)[1].strip().strip('"').strip("'")
-        except Exception:
-            pass
+    # Check config file
+    config = _load_config()
+    return config.get(key_name)
 
-    return None
 
-
-def save_api_key_to_env(env_var: str, api_key: str, env_file_path: Path) -> None:
+def save_api_key(key_name: str, api_key: str) -> None:
     """
-    Save API key to .env file, replacing existing value if present.
+    Save API key to user config directory (~/.morecompute/config.json).
 
     Args:
-        env_var: Environment variable name
+        key_name: Key name (e.g., "PRIME_INTELLECT_API_KEY")
         api_key: API key value to save
-        env_file_path: Path to .env file
 
     Raises:
         ValueError: If API key is empty
@@ -48,12 +70,6 @@ def save_api_key_to_env(env_var: str, api_key: str, env_file_path: Path) -> None
     if not api_key.strip():
         raise ValueError("API key cannot be empty")
 
-    existing_lines = []
-    if env_file_path.exists():
-        with env_file_path.open("r", encoding="utf-8") as f:
-            existing_lines = f.readlines()
-
-    new_lines = [line for line in existing_lines if not line.strip().startswith(f"{env_var}=")]
-    new_lines.append(f"{env_var}={api_key}\n")
-    with env_file_path.open("w", encoding="utf-8") as f:
-        f.writelines(new_lines)
+    config = _load_config()
+    config[key_name] = api_key
+    _save_config(config)
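
Note that the new load_api_key checks os.environ before the config file, even though its docstring describes the opposite order; callers should assume the environment wins. A usage sketch of the new pair (the key value is a placeholder):

    from morecompute.utils.config_util import load_api_key, save_api_key

    # Persists to ~/.morecompute/config.json, creating the directory on first use.
    save_api_key("PRIME_INTELLECT_API_KEY", "pk-example-123")  # placeholder value

    # Environment variable takes precedence; config.json is the fallback.
    key = load_api_key("PRIME_INTELLECT_API_KEY")
    assert key is not None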
@@ -0,0 +1,129 @@
+"""Converter utilities for notebook formats."""
+
+import json
+import re
+from pathlib import Path
+from typing import List, Set
+from .py_percent_parser import generate_py_percent, parse_py_percent
+
+
+def extract_pip_dependencies(notebook_data: dict) -> Set[str]:
+    """
+    Extract package names from !pip install and %pip install commands.
+
+    Args:
+        notebook_data: Parsed notebook JSON
+
+    Returns:
+        Set of package names
+    """
+    packages = set()
+
+    for cell in notebook_data.get('cells', []):
+        if cell.get('cell_type') != 'code':
+            continue
+
+        source = cell.get('source', [])
+        if isinstance(source, list):
+            source = ''.join(source)
+
+        # Match: !pip install package1 package2
+        # Match: %pip install package1 package2
+        pip_pattern = r'[!%]pip\s+install\s+([^\n]+)'
+        matches = re.finditer(pip_pattern, source)
+
+        for match in matches:
+            install_line = match.group(1)
+            # Remove common flags
+            install_line = re.sub(r'--[^\s]+\s*', '', install_line)
+            install_line = re.sub(r'-[qU]\s*', '', install_line)
+
+            # Extract package names (handle package==version format)
+            parts = install_line.split()
+            for part in parts:
+                part = part.strip()
+                if part and not part.startswith('-'):
+                    packages.add(part)
+
+    return packages
+
+
+def convert_ipynb_to_py(ipynb_path: Path, output_path: Path, include_uv_deps: bool = True) -> None:
+    """
+    Convert .ipynb notebook to .py format with py:percent cell markers.
+
+    Args:
+        ipynb_path: Path to input .ipynb file
+        output_path: Path to output .py file
+        include_uv_deps: Whether to add UV inline script dependencies
+    """
+    # Read notebook
+    with open(ipynb_path, 'r', encoding='utf-8') as f:
+        notebook_data = json.load(f)
+
+    cells = notebook_data.get('cells', [])
+
+    # Generate UV dependencies header if requested
+    header_lines = []
+    if include_uv_deps:
+        dependencies = extract_pip_dependencies(notebook_data)
+        if dependencies:
+            header_lines.append('# /// script')
+            header_lines.append('# dependencies = [')
+            for dep in sorted(dependencies):
+                header_lines.append(f'# "{dep}",')
+            header_lines.append('# ]')
+            header_lines.append('# ///')
+            header_lines.append('')
+
+    # Generate py:percent format
+    py_content = generate_py_percent(cells)
+
+    # Combine header and content
+    if header_lines:
+        final_content = '\n'.join(header_lines) + '\n' + py_content
+    else:
+        final_content = py_content
+
+    # Write output
+    with open(output_path, 'w', encoding='utf-8') as f:
+        f.write(final_content)
+
+    print(f"✓ Converted {ipynb_path.name} → {output_path.name}")
+
+    # Show dependencies if found
+    if include_uv_deps and dependencies:
+        print(f" Found dependencies: {', '.join(sorted(dependencies))}")
+        print(f" Run with: more-compute {output_path.name}")
+
+
+def convert_py_to_ipynb(py_path: Path, output_path: Path) -> None:
+    """
+    Convert .py notebook to .ipynb format.
+
+    Args:
+        py_path: Path to input .py file
+        output_path: Path to output .ipynb file
+    """
+    # Read .py file
+    with open(py_path, 'r', encoding='utf-8') as f:
+        py_content = f.read()
+
+    # Parse py:percent format to notebook structure
+    notebook_data = parse_py_percent(py_content)
+
+    # Ensure source is in list format (Jupyter notebook standard)
+    for cell in notebook_data.get('cells', []):
+        source = cell.get('source', '')
+        if isinstance(source, str):
+            # Split into lines and keep newlines (Jupyter format)
+            lines = source.split('\n')
+            # Add \n to each line except the last
+            cell['source'] = [line + '\n' for line in lines[:-1]] + ([lines[-1]] if lines[-1] else [])
+
+    # Write .ipynb file
+    with open(output_path, 'w', encoding='utf-8') as f:
+        json.dump(notebook_data, f, indent=1, ensure_ascii=False)
+
+    print(f"Converted {py_path.name} -> {output_path.name}")
+    print(f" Upload to Google Colab or open in Jupyter")
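
Taken together, the new converter round-trips between the two formats; a usage sketch (the module path is assumed here, since this diff shows only the new file's body, and the CLI spelling comes from the error message in notebook.py):

    from pathlib import Path
    from morecompute.utils.converter import convert_ipynb_to_py, convert_py_to_ipynb  # path assumed

    # .ipynb -> .py: pip installs found in code cells become a PEP 723
    # "# /// script" dependency block above the "# %%" cells.
    convert_ipynb_to_py(Path("analysis.ipynb"), Path("analysis.py"))

    # .py -> .ipynb: cell sources are re-split into Jupyter's list-of-lines form.
    convert_py_to_ipynb(Path("analysis.py"), Path("analysis_roundtrip.ipynb"))

Or, per the hint baked into Notebook.load_from_file: more-compute convert analysis.ipynb -o analysis.py.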