more-compute 0.1.3-py3-none-any.whl → 0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. frontend/app/globals.css +322 -77
  2. frontend/app/layout.tsx +98 -82
  3. frontend/components/Cell.tsx +234 -95
  4. frontend/components/Notebook.tsx +430 -199
  5. frontend/components/{AddCellButton.tsx → cell/AddCellButton.tsx} +0 -2
  6. frontend/components/cell/MonacoCell.tsx +726 -0
  7. frontend/components/layout/ConnectionBanner.tsx +41 -0
  8. frontend/components/{Sidebar.tsx → layout/Sidebar.tsx} +16 -11
  9. frontend/components/modals/ConfirmModal.tsx +154 -0
  10. frontend/components/modals/SuccessModal.tsx +140 -0
  11. frontend/components/output/MarkdownRenderer.tsx +116 -0
  12. frontend/components/popups/ComputePopup.tsx +674 -365
  13. frontend/components/popups/MetricsPopup.tsx +11 -7
  14. frontend/components/popups/SettingsPopup.tsx +11 -13
  15. frontend/contexts/PodWebSocketContext.tsx +247 -0
  16. frontend/eslint.config.mjs +11 -0
  17. frontend/lib/monaco-themes.ts +160 -0
  18. frontend/lib/settings.ts +128 -26
  19. frontend/lib/themes.json +9973 -0
  20. frontend/lib/websocket-native.ts +19 -8
  21. frontend/lib/websocket.ts +59 -11
  22. frontend/next.config.ts +8 -0
  23. frontend/package-lock.json +1705 -3
  24. frontend/package.json +8 -1
  25. frontend/styling_README.md +18 -0
  26. kernel_run.py +161 -43
  27. more_compute-0.2.0.dist-info/METADATA +126 -0
  28. more_compute-0.2.0.dist-info/RECORD +100 -0
  29. morecompute/__version__.py +1 -0
  30. morecompute/execution/executor.py +31 -20
  31. morecompute/execution/worker.py +68 -7
  32. morecompute/models/__init__.py +31 -0
  33. morecompute/models/api_models.py +197 -0
  34. morecompute/notebook.py +50 -7
  35. morecompute/server.py +574 -94
  36. morecompute/services/data_manager.py +379 -0
  37. morecompute/services/lsp_service.py +335 -0
  38. morecompute/services/pod_manager.py +122 -20
  39. morecompute/services/pod_monitor.py +138 -0
  40. morecompute/services/prime_intellect.py +87 -63
  41. morecompute/utils/config_util.py +59 -0
  42. morecompute/utils/special_commands.py +11 -5
  43. morecompute/utils/zmq_util.py +51 -0
  44. frontend/components/MarkdownRenderer.tsx +0 -84
  45. frontend/components/popups/PythonPopup.tsx +0 -292
  46. more_compute-0.1.3.dist-info/METADATA +0 -173
  47. more_compute-0.1.3.dist-info/RECORD +0 -85
  48. /frontend/components/{CellButton.tsx → cell/CellButton.tsx} +0 -0
  49. /frontend/components/{ErrorModal.tsx → modals/ErrorModal.tsx} +0 -0
  50. /frontend/components/{CellOutput.tsx → output/CellOutput.tsx} +0 -0
  51. /frontend/components/{ErrorDisplay.tsx → output/ErrorDisplay.tsx} +0 -0
  52. {more_compute-0.1.3.dist-info → more_compute-0.2.0.dist-info}/WHEEL +0 -0
  53. {more_compute-0.1.3.dist-info → more_compute-0.2.0.dist-info}/entry_points.txt +0 -0
  54. {more_compute-0.1.3.dist-info → more_compute-0.2.0.dist-info}/licenses/LICENSE +0 -0
  55. {more_compute-0.1.3.dist-info → more_compute-0.2.0.dist-info}/top_level.txt +0 -0
@@ -150,6 +150,74 @@ def worker_main():
         exec_count = requested_count - 1
         command_type = msg.get('command_type')
         pub.send_json({'type': 'execution_start', 'cell_index': cell_index, 'execution_count': exec_count + 1})
+
+        # Check if this is a special command (shell command starting with ! or magic command)
+        is_special_cmd = code.strip().startswith('!') or code.strip().startswith('%')
+
+        if is_special_cmd:
+            # Handle special commands on remote worker
+            exec_count += 1
+            status = 'ok'
+            error_payload = None
+            start = time.time()
+
+            try:
+                import subprocess
+                import shlex
+
+                # Strip the ! prefix for shell commands
+                if code.strip().startswith('!'):
+                    shell_cmd = code.strip()[1:].strip()
+
+                    # Run shell command
+                    process = subprocess.Popen(
+                        ['/bin/bash', '-c', shell_cmd],
+                        stdout=subprocess.PIPE,
+                        stderr=subprocess.PIPE,
+                        text=True
+                    )
+                    stdout, stderr = process.communicate()
+
+                    # Send stdout
+                    if stdout:
+                        pub.send_json({'type': 'stream', 'name': 'stdout', 'text': stdout, 'cell_index': cell_index})
+
+                    # Send stderr
+                    if stderr:
+                        pub.send_json({'type': 'stream', 'name': 'stderr', 'text': stderr, 'cell_index': cell_index})
+
+                    # Check return code
+                    if process.returncode != 0:
+                        status = 'error'
+                        # Only set error if we don't have detailed stderr
+                        if not stderr.strip():
+                            error_payload = {
+                                'ename': 'ShellCommandError',
+                                'evalue': f'Command failed with return code {process.returncode}',
+                                'traceback': [f'Shell command failed: {shell_cmd}']
+                            }
+                else:
+                    # Magic commands not fully supported on remote yet
+                    status = 'error'
+                    error_payload = {
+                        'ename': 'NotImplementedError',
+                        'evalue': 'Magic commands (%) not yet supported on remote GPU pods',
+                        'traceback': ['Use ! for shell commands instead']
+                    }
+
+            except Exception as exc:
+                status = 'error'
+                error_payload = {'ename': type(exc).__name__, 'evalue': str(exc), 'traceback': traceback.format_exc().split('\n')}
+
+            duration_ms = f"{(time.time()-start)*1000:.1f}ms"
+            if error_payload:
+                pub.send_json({'type': 'execution_error', 'cell_index': cell_index, 'error': error_payload})
+            pub.send_json({'type': 'execution_complete', 'cell_index': cell_index, 'result': {'status': status, 'execution_count': exec_count, 'execution_time': duration_ms, 'outputs': [], 'error': error_payload}})
+            rep.send_json({'ok': True, 'pid': os.getpid()})
+            current_cell = None
+            continue
+
+        # Regular Python code execution
         # Redirect streams
         sf = _StreamForwarder(pub, cell_index)
         old_out, old_err = sys.stdout, sys.stderr
@@ -164,13 +232,6 @@ def worker_main():
         error_payload = None
         start = time.time()
         try:
-            if command_type == 'special':
-                # This path should be handled in-process; worker only handles python execution
-                exec_count += 1
-                pub.send_json({'type': 'execution_complete', 'cell_index': cell_index, 'result': {'status': 'ok', 'execution_count': exec_count, 'execution_time': '0.0ms', 'outputs': [], 'error': None}})
-                rep.send_json({'ok': True})
-                current_cell = None
-                continue
             compiled = compile(code, '<cell>', 'exec')
             exec(compiled, g, l)
 
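For context, a minimal standalone sketch of the message flow this new branch produces for a shell cell such as "!echo hello". This is an illustration only, not code from the package: _StubPub is a hypothetical stand-in for the worker's ZMQ PUB socket, and the payload shapes are taken from the hunk above (assumed to run on a POSIX system with /bin/bash available).

    import subprocess
    import time

    class _StubPub:
        """Hypothetical stand-in for the worker's ZMQ PUB socket."""
        def send_json(self, payload):
            print(payload)

    pub, cell_index, exec_count = _StubPub(), 0, 1
    code = "!echo hello"

    shell_cmd = code.strip()[1:].strip()   # drop the leading '!'
    start = time.time()
    proc = subprocess.run(['/bin/bash', '-c', shell_cmd], capture_output=True, text=True)

    if proc.stdout:
        pub.send_json({'type': 'stream', 'name': 'stdout', 'text': proc.stdout, 'cell_index': cell_index})
    if proc.stderr:
        pub.send_json({'type': 'stream', 'name': 'stderr', 'text': proc.stderr, 'cell_index': cell_index})

    pub.send_json({'type': 'execution_complete', 'cell_index': cell_index,
                   'result': {'status': 'ok' if proc.returncode == 0 else 'error',
                              'execution_count': exec_count,
                              'execution_time': f"{(time.time() - start) * 1000:.1f}ms",
                              'outputs': [], 'error': None}})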
@@ -0,0 +1,31 @@
+"""Models package for typed API data structures."""
+
+from .api_models import (
+    ApiKeyRequest,
+    ApiKeyResponse,
+    ConfigStatusResponse,
+    DatasetInfoRequest,
+    DatasetCheckRequest,
+    DatasetDiskRequest,
+    PackageInfo,
+    PackagesResponse,
+    EnvironmentInfo,
+    EnvironmentsResponse,
+    FileItem,
+    FileTreeResponse,
+)
+
+__all__ = [
+    "ApiKeyRequest",
+    "ApiKeyResponse",
+    "ConfigStatusResponse",
+    "DatasetInfoRequest",
+    "DatasetCheckRequest",
+    "DatasetDiskRequest",
+    "PackageInfo",
+    "PackagesResponse",
+    "EnvironmentInfo",
+    "EnvironmentsResponse",
+    "FileItem",
+    "FileTreeResponse",
+]
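Note that only this first group of models is re-exported at the package level; the pod and disk models defined further down in api_models.py are not in __all__, so they would need to be imported from the submodule directly. A small import sketch, with module paths taken from the file list above:

    # Re-exported via morecompute/models/__init__.py
    from morecompute.models import FileTreeResponse, PackagesResponse

    # Not re-exported; import from the submodule (see the models added below)
    from morecompute.models.api_models import CreatePodRequest, PodConfig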
@@ -0,0 +1,197 @@
+"""Typed models for API requests and responses."""
+
+from pydantic import BaseModel
+from datetime import datetime
+
+
+class ApiKeyRequest(BaseModel):
+    """Request model for setting API key."""
+    api_key: str
+
+
+class ApiKeyResponse(BaseModel):
+    """Response model for API key configuration."""
+    configured: bool
+    message: str | None = None
+
+
+class ConfigStatusResponse(BaseModel):
+    """Response model for configuration status."""
+    configured: bool
+
+
+class DatasetInfoRequest(BaseModel):
+    """Request model for dataset info."""
+    name: str
+    config: str | None = None
+
+
+class DatasetCheckRequest(BaseModel):
+    """Request model for checking dataset load."""
+    name: str
+    config: str | None = None
+    split: str | None = None
+    auto_stream_threshold_gb: float = 10.0
+
+
+class DatasetDiskRequest(BaseModel):
+    """Request model for creating dataset disk."""
+    pod_id: str
+    disk_name: str
+    size_gb: int
+    provider_type: str = "runpod"
+
+
+class PackageInfo(BaseModel):
+    """Model for installed package information."""
+    name: str
+    version: str
+
+
+class PackagesResponse(BaseModel):
+    """Response model for package list."""
+    packages: list[PackageInfo]
+
+
+class EnvironmentInfo(BaseModel):
+    """Model for Python environment information."""
+    name: str
+    path: str
+    version: str
+    is_current: bool
+
+
+class EnvironmentsResponse(BaseModel):
+    """Response model for environment list."""
+    status: str
+    environments: list[EnvironmentInfo]
+    current: EnvironmentInfo | None
+
+
+class FileItem(BaseModel):
+    """Model for file/directory item."""
+    name: str
+    path: str
+    type: str
+    size: int | None = None
+    modified: str | None = None
+
+
+class FileTreeResponse(BaseModel):
+    """Response model for file tree listing."""
+    root: str
+    path: str
+    items: list[FileItem]
+
+
+# ============================================================================
+# Prime Intellect GPU API Models
+# ============================================================================
+
+class EnvVar(BaseModel):
+    """Environment variable for pod configuration."""
+    key: str
+    value: str
+
+
+class PodConfig(BaseModel):
+    """Configuration for creating a GPU pod."""
+    # Required fields
+    name: str
+    cloudId: str
+    gpuType: str
+    socket: str
+    gpuCount: int = 1
+
+    # Optional fields
+    diskSize: int | None = None
+    vcpus: int | None = None
+    memory: int | None = None
+    maxPrice: float | None = None
+    image: str | None = None
+    customTemplateId: str | None = None
+    dataCenterId: str | None = None
+    country: str | None = None
+    security: str | None = None
+    envVars: list[EnvVar] | None = None
+    jupyterPassword: str | None = None
+    autoRestart: bool | None = None
+
+
+class ProviderConfig(BaseModel):
+    """Cloud provider configuration."""
+    type: str = "runpod"
+
+
+class TeamConfig(BaseModel):
+    """Team configuration for shared resources."""
+    teamId: str | None = None
+
+
+class CreatePodRequest(BaseModel):
+    """Request to create a new GPU pod."""
+    pod: PodConfig
+    provider: ProviderConfig
+    team: TeamConfig | None = None
+
+
+class DiskConfig(BaseModel):
+    """Configuration for creating a persistent disk."""
+    name: str
+    size: int  # Size in GB
+    cloudId: str | None = None
+    dataCenterId: str | None = None
+    country: str | None = None
+
+
+class CreateDiskRequest(BaseModel):
+    """Request to create a new disk."""
+    disk: DiskConfig
+    provider: ProviderConfig
+    team: TeamConfig | None = None
+
+
+class DiskResponse(BaseModel):
+    """Response with disk information."""
+    id: str
+    name: str
+    remoteId: str
+    providerType: str
+    status: str
+    size: int
+    createdAt: datetime
+    updatedAt: datetime
+    terminatedAt: datetime | None
+    priceHr: float | None
+    stoppedPriceHr: float | None
+    provisioningPriceHr: float | None
+    userId: str | None
+    teamId: str | None
+    walletId: str | None
+    pods: list[str]
+    clusters: list[str]
+    info: dict[str, object] | None
+
+
+class PodResponse(BaseModel):
+    """Response with GPU pod information."""
+    id: str
+    userId: str
+    teamId: str | None
+    name: str
+    status: str
+    gpuName: str
+    gpuCount: int
+    priceHr: float
+    sshConnection: str | None
+    ip: str | None
+    createdAt: datetime
+    updatedAt: datetime
+
+
+class AvailabilityQuery(BaseModel):
+    """Query parameters for GPU availability."""
+    regions: list[str] | None = None
+    gpu_count: int | None = None
+    gpu_type: str | None = None
+    security: str | None = None
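As an illustration of how these typed models fit together, here is a hypothetical pod-creation request built from the classes above. The field values are invented, and serialization assumes Pydantic v2 (model_dump; on v1 it would be .dict()):

    from morecompute.models.api_models import CreatePodRequest, PodConfig, ProviderConfig

    req = CreatePodRequest(
        pod=PodConfig(
            name="notebook-pod",        # example values only
            cloudId="cloud-123",
            gpuType="H100_80GB",
            socket="PCIe",
            gpuCount=1,
            image="pytorch/pytorch",    # optional field
        ),
        provider=ProviderConfig(),      # defaults to type="runpod"
        team=None,
    )

    payload = req.model_dump(exclude_none=True)   # JSON-ready dict for the HTTP API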
morecompute/notebook.py CHANGED
@@ -13,13 +13,43 @@ class Notebook:
             self.load_from_file(file_path)
         else:
             # Default empty notebook structure
-            self.cells.append({'id': self._generate_cell_id(), 'cell_type': 'code', 'source': '', 'outputs': []})
+            self.cells.append({
+                'id': self._generate_cell_id(),
+                'cell_type': 'code',
+                'source': '',
+                'metadata': {},
+                'outputs': [],
+                'execution_count': None
+            })
 
     def get_notebook_data(self) -> Dict[str, Any]:
         return {"cells": self.cells, "metadata": self.metadata, "file_path": self.file_path}
 
-    def add_cell(self, index: int, cell_type: str = 'code', source: str = ''):
-        new_cell = {'id': self._generate_cell_id(), 'cell_type': cell_type, 'source': source, 'outputs': []}
+    def add_cell(self, index: int, cell_type: str = 'code', source: str = '', full_cell: dict = None):
+        if full_cell:
+            actual_cell_type = full_cell.get('cell_type', cell_type)
+            new_cell = {
+                'id': full_cell.get('id', self._generate_cell_id()),
+                'cell_type': actual_cell_type,
+                'source': full_cell.get('source', source),
+                'metadata': full_cell.get('metadata', {}),
+            }
+            # Only add outputs and execution_count for code cells
+            if actual_cell_type == 'code':
+                new_cell['outputs'] = full_cell.get('outputs', [])
+                new_cell['execution_count'] = full_cell.get('execution_count')
+        else:
+            # Normal new cell creation
+            new_cell = {
+                'id': self._generate_cell_id(),
+                'cell_type': cell_type,
+                'source': source,
+                'metadata': {}
+            }
+            # Only add outputs for code cells
+            if cell_type == 'code':
+                new_cell['outputs'] = []
+                new_cell['execution_count'] = None
         self.cells.insert(index, new_cell)
 
     def delete_cell(self, index: int):
@@ -30,10 +60,16 @@ class Notebook:
         if 0 <= index < len(self.cells):
            self.cells[index]['source'] = source
 
+    def move_cell(self, from_index: int, to_index: int):
+        if 0 <= from_index < len(self.cells) and 0 <= to_index < len(self.cells):
+            cell = self.cells.pop(from_index)
+            self.cells.insert(to_index, cell)
+
     def clear_all_outputs(self):
         for cell in self.cells:
-            cell['outputs'] = []
-            if 'execution_count' in cell:
+            # Only clear outputs for code cells
+            if cell.get('cell_type') == 'code':
+                cell['outputs'] = []
                 cell['execution_count'] = None
 
     def to_json(self) -> str:
@@ -64,7 +100,14 @@
         except (FileNotFoundError, json.JSONDecodeError) as e:
             print(f"Error loading notebook: {e}")
             # Initialize with a default cell if loading fails
-            self.cells = [{'id': self._generate_cell_id(), 'cell_type': 'code', 'source': '', 'outputs': []}]
+            self.cells = [{
+                'id': self._generate_cell_id(),
+                'cell_type': 'code',
+                'source': '',
+                'metadata': {},
+                'outputs': [],
+                'execution_count': None
+            }]
             self.metadata = {}
             self.file_path = file_path
 
@@ -72,7 +115,7 @@
         path_to_save = file_path or self.file_path
         if not path_to_save:
             raise ValueError("No file path specified for saving.")
-
+
         with open(path_to_save, 'w') as f:
             f.write(self.to_json())
         self.file_path = path_to_save
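To summarize the new notebook behaviour, a hypothetical usage sketch of the updated API. The no-argument constructor is assumed from the default-cell branch shown above; only methods visible in these hunks are used:

    from morecompute.notebook import Notebook

    nb = Notebook()                                   # assumed: starts with one empty code cell
    nb.add_cell(1, cell_type='markdown', source='# Title')   # markdown cells get no outputs/execution_count
    nb.add_cell(2, full_cell={                        # e.g. a fully specified cell sent by the frontend
        'id': 'cell-abc',
        'cell_type': 'code',
        'source': 'print("hi")',
        'metadata': {},
        'outputs': [],
        'execution_count': None,
    })
    nb.move_cell(2, 0)                                # pop index 2, re-insert at index 0
    nb.clear_all_outputs()                            # only code cells have outputs to clear
    print(nb.to_json())                               # serialized notebook, ready to write to disk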