mito-ai 0.1.40__py3-none-any.whl → 0.1.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mito-ai might be problematic.

Files changed (55)
  1. mito_ai/__init__.py +12 -6
  2. mito_ai/_version.py +1 -1
  3. mito_ai/app_builder/handlers.py +1 -2
  4. mito_ai/completions/handlers.py +1 -1
  5. mito_ai/completions/message_history.py +9 -1
  6. mito_ai/completions/models.py +1 -1
  7. mito_ai/completions/prompt_builders/agent_execution_prompt.py +2 -0
  8. mito_ai/completions/prompt_builders/agent_smart_debug_prompt.py +8 -0
  9. mito_ai/completions/prompt_builders/agent_system_message.py +17 -0
  10. mito_ai/constants.py +3 -2
  11. mito_ai/file_uploads/__init__.py +3 -0
  12. mito_ai/file_uploads/handlers.py +225 -0
  13. mito_ai/file_uploads/urls.py +21 -0
  14. mito_ai/openai_client.py +1 -1
  15. mito_ai/tests/file_uploads/__init__.py +2 -0
  16. mito_ai/tests/file_uploads/test_handlers.py +267 -0
  17. mito_ai/tests/message_history/test_message_history_utils.py +57 -4
  18. mito_ai/utils/mito_server_utils.py +7 -0
  19. mito_ai/utils/server_limits.py +1 -1
  20. mito_ai/utils/telemetry_utils.py +26 -9
  21. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/build_log.json +102 -100
  22. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/package.json +4 -2
  23. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/schemas/mito_ai/package.json.orig +3 -1
  24. mito_ai-0.1.40.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.55d9f8ca386d87856d2d.js → mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.01a962c68c8fae380f30.js +1782 -1027
  25. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.01a962c68c8fae380f30.js.map +1 -0
  26. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/node_modules_aws-amplify_core_dist_esm_singleton_apis_fetchAuthSession_mjs.182232e7bc6311fe4528.js +63 -0
  27. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/node_modules_aws-amplify_core_dist_esm_singleton_apis_fetchAuthSession_mjs.182232e7bc6311fe4528.js.map +1 -0
  28. mito_ai-0.1.40.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.264103d9addd1e166113.js → mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.9a70f033717ba8689564.js +49 -25
  29. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.9a70f033717ba8689564.js.map +1 -0
  30. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_tokenProvider_tokenProvider_mjs.16430abf3466c3153f59.js +4574 -0
  31. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_auth_dist_esm_providers_cognito_tokenProvider_tokenProvider_mjs.16430abf3466c3153f59.js.map +1 -0
  32. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_core_dist_esm_singleton_Amplify_mjs.3c0035b95fe369aede82.js +2345 -0
  33. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_core_dist_esm_singleton_Amplify_mjs.3c0035b95fe369aede82.js.map +1 -0
  34. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_core_dist_esm_singleton_apis_fetchAuthSession_mjs-node_modul-758875.dc495fd682071d97070c.js +7498 -0
  35. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_core_dist_esm_singleton_apis_fetchAuthSession_mjs-node_modul-758875.dc495fd682071d97070c.js.map +1 -0
  36. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_dist_esm_index_mjs.6bac1a8c4cc93f15f6b7.js +1021 -0
  37. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_dist_esm_index_mjs.6bac1a8c4cc93f15f6b7.js.map +1 -0
  38. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_ui-react_dist_esm_index_mjs.61289bff0db44828605b.js +60178 -0
  39. mito_ai-0.1.41.data/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_aws-amplify_ui-react_dist_esm_index_mjs.61289bff0db44828605b.js.map +1 -0
  40. {mito_ai-0.1.40.dist-info → mito_ai-0.1.41.dist-info}/METADATA +1 -1
  41. {mito_ai-0.1.40.dist-info → mito_ai-0.1.41.dist-info}/RECORD +53 -36
  42. mito_ai-0.1.40.data/data/share/jupyter/labextensions/mito_ai/static/lib_index_js.55d9f8ca386d87856d2d.js.map +0 -1
  43. mito_ai-0.1.40.data/data/share/jupyter/labextensions/mito_ai/static/remoteEntry.264103d9addd1e166113.js.map +0 -1
  44. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/etc/jupyter/jupyter_server_config.d/mito_ai.json +0 -0
  45. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/schemas/mito_ai/toolbar-buttons.json +0 -0
  46. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/style.js +0 -0
  47. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/style_index_js.5876024bb17dbd6a3ee6.js +0 -0
  48. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/style_index_js.5876024bb17dbd6a3ee6.js.map +0 -0
  49. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_semver_index_js.9795f79265ddb416864b.js +0 -0
  50. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_semver_index_js.9795f79265ddb416864b.js.map +0 -0
  51. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_vscode-diff_dist_index_js.ea55f1f9346638aafbcf.js +0 -0
  52. {mito_ai-0.1.40.data → mito_ai-0.1.41.data}/data/share/jupyter/labextensions/mito_ai/static/vendors-node_modules_vscode-diff_dist_index_js.ea55f1f9346638aafbcf.js.map +0 -0
  53. {mito_ai-0.1.40.dist-info → mito_ai-0.1.41.dist-info}/WHEEL +0 -0
  54. {mito_ai-0.1.40.dist-info → mito_ai-0.1.41.dist-info}/entry_points.txt +0 -0
  55. {mito_ai-0.1.40.dist-info → mito_ai-0.1.41.dist-info}/licenses/LICENSE +0 -0
mito_ai/__init__.py CHANGED
@@ -14,13 +14,18 @@ from mito_ai.settings.urls import get_settings_urls
  from mito_ai.rules.urls import get_rules_urls
  from mito_ai.auth.urls import get_auth_urls
  from mito_ai.streamlit_preview.urls import get_streamlit_preview_urls
+ from mito_ai.file_uploads.urls import get_file_uploads_urls

- # Sometimes matplotlib figures do not show up in the notebook with this warning:
- # UserWarning: FigureCanvasAgg is non-interactive, and thus cannot be shown
- # I believe that streamlit is reconfiguring the matplotlib settings and this is happening as a result.
- # For now, we just set the backend to inline, so that the figures show up again
+ # Force Matplotlib to use the Jupyter inline backend.
+ # Background: importing Streamlit sets os.environ["MPLBACKEND"] = "Agg" very early.
+ # In a Jupyter kernel, that selects a non-interactive canvas and can trigger:
+ # "UserWarning: FigureCanvasAgg is non-interactive, and thus cannot be shown"
+ # which prevents figures from rendering in notebook outputs.
+ # We preempt this by selecting the canonical Jupyter inline backend BEFORE any
+ # Matplotlib import, so figures render inline reliably. This must run very early.
+ # See: https://github.com/streamlit/streamlit/issues/9640
  import os
- os.environ['MPLBACKEND'] = 'inline'
+ os.environ["MPLBACKEND"] = "module://matplotlib_inline.backend_inline"

  try:
      from _version import __version__
@@ -87,6 +92,7 @@ def _load_jupyter_server_extension(server_app) -> None: # type: ignore
      handlers.extend(get_log_urls(base_url, open_ai_provider.key_type)) # type: ignore
      handlers.extend(get_auth_urls(base_url)) # type: ignore
      handlers.extend(get_streamlit_preview_urls(base_url)) # type: ignore
-
+     handlers.extend(get_file_uploads_urls(base_url)) # type: ignore
+
      web_app.add_handlers(host_pattern, handlers)
      server_app.log.info("Loaded the mito_ai server extension")
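
Note on the MPLBACKEND change above: Matplotlib reads MPLBACKEND when it is first imported, so the override only takes effect if it runs before the first matplotlib import (hence the "very early" comment). A minimal sketch of the intended behavior, assuming matplotlib and matplotlib-inline are installed; this is illustrative, not code from the package:

import os

# Select the Jupyter inline backend before matplotlib is imported anywhere.
# "module://matplotlib_inline.backend_inline" is the backend used by %matplotlib inline.
os.environ["MPLBACKEND"] = "module://matplotlib_inline.backend_inline"

import matplotlib

# In a Jupyter kernel this now reports the inline backend instead of "agg",
# so figures render in the notebook output area.
print(matplotlib.get_backend())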
mito_ai/_version.py CHANGED
@@ -1,4 +1,4 @@
  # This file is auto-generated by Hatchling. As such, do not:
  # - modify
  # - track in version control e.g. be sure to add to .gitignore
- __version__ = VERSION = '0.1.40'
+ __version__ = VERSION = '0.1.41'
mito_ai/app_builder/handlers.py CHANGED
@@ -156,9 +156,8 @@ class AppBuilderHandler(BaseWebSocketHandler):
          success_flag, app_path_result, result_message = await streamlit_handler(notebook_path)
          if not success_flag or app_path_result is None:
              raise Exception(result_message)
-         app_path = app_path_result

-         deploy_url = await self._deploy_app(app_path, jwt_token)
+         deploy_url = await self._deploy_app(app_directory, jwt_token)

          # Send the response
          self.reply(BuildAppReply(
mito_ai/completions/handlers.py CHANGED
@@ -46,7 +46,7 @@ from mito_ai.completions.completion_handlers.agent_execution_handler import get_
  from mito_ai.completions.completion_handlers.agent_auto_error_fixup_handler import get_agent_auto_error_fixup_completion
  from mito_ai.utils.telemetry_utils import identify

- FALLBACK_MODEL = "gpt-5" # Default model to use for safety
+ FALLBACK_MODEL = "gpt-4.1" # Default model to use for safety

  # The GlobalMessageHistory is responsible for updating the message histories stored in the .mito/ai-chats directory.
  # We create one GlobalMessageHistory per backend server instance instead of one per websocket connection so that the
mito_ai/completions/message_history.py CHANGED
@@ -251,7 +251,15 @@ class GlobalMessageHistory:
          with self._lock:
              if thread_id not in self._chat_threads:
                  return []
-             return self._chat_threads[thread_id].display_history
+
+             thread = self._chat_threads[thread_id]
+             display_history = thread.display_history
+
+             # When we get a thread, update it's last interaction time so that if the
+             # user refreshes their browser, this chat will re-appear as the last opened chat.
+             self._update_last_interaction(thread)
+             self._save_thread_to_disk(thread)
+             return display_history

      async def append_message(
          self,
mito_ai/completions/models.py CHANGED
@@ -29,7 +29,7 @@ class CellUpdate(BaseModel):
  # for now and rely on the AI to respond with the correct types, following the format
  # that we show it in the system prompt.
  class AgentResponse(BaseModel):
-     type: Literal['cell_update', 'get_cell_output', 'finished_task']
+     type: Literal['cell_update', 'get_cell_output', 'run_all_cells', 'finished_task']
      message: str
      cell_update: Optional[CellUpdate]
      get_cell_output_cell_id: Optional[str]
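
To illustrate the model change above, here is a minimal, hypothetical sketch (CellUpdate is stubbed; the real definitions live in mito_ai/completions/models.py) showing that a run_all_cells reply now validates against the widened Literal:

from typing import Literal, Optional
from pydantic import BaseModel

class CellUpdate(BaseModel):
    # Stub for illustration only; the real CellUpdate has the package's own fields.
    id: Optional[str] = None
    code: Optional[str] = None

class AgentResponse(BaseModel):
    # Mirrors the fields shown in the diff above.
    type: Literal['cell_update', 'get_cell_output', 'run_all_cells', 'finished_task']
    message: str
    cell_update: Optional[CellUpdate] = None
    get_cell_output_cell_id: Optional[str] = None

# A RUN_ALL_CELLS response, as described in the agent prompts below, parses cleanly:
reply = AgentResponse(type='run_all_cells', message='Re-running all cells to bring the variable into scope.')
print(reply.type)  # run_all_cells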
mito_ai/completions/prompt_builders/agent_execution_prompt.py CHANGED
@@ -13,12 +13,14 @@ from mito_ai.completions.prompt_builders.utils import (
      get_selected_context_str,
  )

+
  def create_agent_execution_prompt(md: AgentExecutionMetadata) -> str:
      variables_str = '\n'.join([f"{variable}" for variable in md.variables or []])
      files_str = '\n'.join([f"{file}" for file in md.files or []])
      ai_optimized_cells_str = '\n'.join([f"{cell}" for cell in md.aiOptimizedCells or []])
      rules_str = get_rules_str(md.additionalContext)
      selected_context_str = get_selected_context_str(md.additionalContext)
+
      context_str = f"""Remember to choose the correct tool to respond with.

  {rules_str}
mito_ai/completions/prompt_builders/agent_smart_debug_prompt.py CHANGED
@@ -54,6 +54,14 @@ ERROR CORRECTION:
  - Reuse as much of the existing code as possible.
  - DO NOT ADD TEMPORARY COMMENTS like '# Fixed the typo here' or '# Added this line to fix the error'
  - If you encounter a ModuleNotFoundError, you can install the package by adding the the following line to the top of the code cell: `!pip install <package_name> --quiet`.
+ - If you encounter a NameError, you can use the RUN_ALL_CELLS tool to run all cells from the top of the notebook to the bottom to bring the variable into scope.
+ RUN_ALL_CELLS:
+ When you want to execute all cells in the notebook from top to bottom, respond with this format:
+ {{
+ type: 'run_all_cells',
+ message: str
+ }}
+ Note that if the name error persists even after using run_all_cells, it means that the variable is not defined in the notebook and you should not reuse this tool. Additionally, this tool could also be used to refresh the notebook state.

  <Example>

mito_ai/completions/prompt_builders/agent_system_message.py CHANGED
@@ -206,6 +206,23 @@ Important information:
  ===='''
  }

+ TOOL: RUN_ALL_CELLS
+
+ When you want to execute all cells in the notebook from top to bottom, respond with this format:
+
+ {{
+ type: 'run_all_cells',
+ message: str
+ }}
+
+ Important information:
+ 1. Use this tool when you encounter a NameError. For example, if you get an error like "NameError: name 'prompts_df' is not defined", you should use this tool to run all cells from the top of the notebook to the bottom to bring the variable into scope.
+ 2. Note that if the name error persists even after using run_all_cells, it means that the variable is not defined in the notebook and you should not reuse this tool.
+ 3. Additionally, this tool could also be used to refresh the notebook state.
+ 4. If running all cells results in an error, the system will automatically handle the error through the normal error fixing process.
+ 5. Do not use this tool repeatedly if it continues to produce errors - instead, focus on fixing the specific error that occurred.
+ ====
+

  TOOL: FINISHED_TASK

  When you have completed the user's task, respond with a message in this format:
mito_ai/constants.py CHANGED
@@ -24,8 +24,9 @@ AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
  AZURE_OPENAI_MODEL = os.environ.get("AZURE_OPENAI_MODEL")

  # Mito AI Base URLs and Endpoint Paths
- MITO_PROD_BASE_URL = "https://7eax4i53f5odkshhlry4gw23by0yvnuv.lambda-url.us-east-1.on.aws/v1"
- MITO_DEV_BASE_URL = "https://g5vwmogjg7gh7aktqezyrvcq6a0hyfnr.lambda-url.us-east-1.on.aws/v1"
+ MITO_PROD_BASE_URL = "https://7eax4i53f5odkshhlry4gw23by0yvnuv.lambda-url.us-east-1.on.aws/v2"
+ MITO_DEV_BASE_URL = "https://g5vwmogjg7gh7aktqezyrvcq6a0hyfnr.lambda-url.us-east-1.on.aws/v2"
+ MITO_LOCAL_BASE_URL = "http://127.0.0.1:8000/v2" # When you are running the mito completion server locally

  # Set ACTIVE_BASE_URL manually
  ACTIVE_BASE_URL = MITO_PROD_BASE_URL # Change to MITO_DEV_BASE_URL for dev
mito_ai/file_uploads/__init__.py ADDED
@@ -0,0 +1,3 @@
+ # Copyright (c) Saga Inc.
+ # Distributed under the terms of the GNU Affero General Public License v3.0 License.
+
mito_ai/file_uploads/handlers.py ADDED
@@ -0,0 +1,225 @@
+ # Copyright (c) Saga Inc.
+ # Distributed under the terms of the GNU Affero General Public License v3.0 License.
+
+ import os
+ import tempfile
+ import tornado
+ from typing import Dict, Any
+ from jupyter_server.base.handlers import APIHandler
+ from mito_ai.utils.telemetry_utils import log_file_upload_attempt, log_file_upload_failure
+
+
+ class FileUploadHandler(APIHandler):
+     # Class-level dictionary to store temporary directories for each file upload
+     # This persists across handler instances since Tornado recreates handlers per request
+     # Key: filename, Value: dict with temp_dir, total_chunks, received_chunks, logged_upload
+     _temp_dirs: Dict[str, Dict[str, Any]] = {}
+
+     def __init__(self, *args: Any, **kwargs: Any) -> None:
+         super().__init__(*args, **kwargs)
+
+     @tornado.web.authenticated
+     def post(self) -> None:
+         """Handle file upload with multipart form data."""
+         try:
+             # Validate request has file
+             if not self._validate_file_upload():
+                 return
+
+             uploaded_file = self.request.files["file"][0]
+             filename = uploaded_file["filename"]
+             file_data = uploaded_file["body"]
+
+             # Get notebook directory from request
+             notebook_dir = self.get_argument("notebook_dir", ".")
+
+             # Check if this is a chunked upload
+             chunk_number = self.get_argument("chunk_number", None)
+             total_chunks = self.get_argument("total_chunks", None)
+
+             if chunk_number and total_chunks:
+                 self._handle_chunked_upload(
+                     filename, file_data, chunk_number, total_chunks, notebook_dir
+                 )
+             else:
+                 # Log the file upload attempt for regular (non-chunked) uploads
+                 file_extension = filename.split(".")[-1].lower()
+                 log_file_upload_attempt(filename, file_extension, False, 0)
+                 self._handle_regular_upload(filename, file_data, notebook_dir)
+
+             self.finish()
+
+         except Exception as e:
+             self._handle_error(f"Failed to save file: {str(e)}")
+
+     def _validate_file_upload(self) -> bool:
+         """Validate that a file was uploaded in the request."""
+         if "file" not in self.request.files:
+             self._handle_error("No file uploaded", status_code=400)
+             return False
+         return True
+
+     def _handle_chunked_upload(
+         self,
+         filename: str,
+         file_data: bytes,
+         chunk_number: str,
+         total_chunks: str,
+         notebook_dir: str,
+     ) -> None:
+         """Handle chunked file upload."""
+         chunk_num = int(chunk_number)
+         total_chunks_num = int(total_chunks)
+
+         # Log the file upload attempt only for the first chunk
+         if chunk_num == 1:
+             file_extension = filename.split(".")[-1].lower()
+             log_file_upload_attempt(filename, file_extension, True, total_chunks_num)
+
+         # Save chunk to temporary file
+         self._save_chunk(filename, file_data, chunk_num, total_chunks_num)
+
+         # Check if all chunks are received and reconstruct if complete
+         if self._are_all_chunks_received(filename, total_chunks_num):
+             self._reconstruct_file(filename, total_chunks_num, notebook_dir)
+             self._send_chunk_complete_response(filename, notebook_dir)
+         else:
+             self._send_chunk_received_response(chunk_num, total_chunks_num)
+
+     def _handle_regular_upload(
+         self, filename: str, file_data: bytes, notebook_dir: str
+     ) -> None:
+         """Handle regular (non-chunked) file upload."""
+         file_path = os.path.join(notebook_dir, filename)
+         with open(file_path, "wb") as f:
+             f.write(file_data)
+
+         self.write({"success": True, "filename": filename, "path": file_path})
+
+     def _save_chunk(
+         self, filename: str, file_data: bytes, chunk_number: int, total_chunks: int
+     ) -> None:
+         """Save a chunk to a temporary file."""
+         print(f"DEBUG: Saving chunk {chunk_number}/{total_chunks} for file {filename}")
+
+         # Initialize temporary directory for this file if it doesn't exist
+         if filename not in self._temp_dirs:
+             temp_dir = tempfile.mkdtemp(prefix=f"mito_upload_{filename}_")
+             self._temp_dirs[filename] = {
+                 "temp_dir": temp_dir,
+                 "total_chunks": total_chunks,
+                 "received_chunks": set(),
+             }
+             print(f"DEBUG: Created temp dir {temp_dir} for file {filename}")
+
+         # Save the chunk to the temporary directory
+         chunk_filename = os.path.join(
+             self._temp_dirs[filename]["temp_dir"], f"chunk_{chunk_number}"
+         )
+         with open(chunk_filename, "wb") as f:
+             f.write(file_data)
+
+         # Mark this chunk as received
+         self._temp_dirs[filename]["received_chunks"].add(chunk_number)
+         print(
+             f"DEBUG: Saved chunk {chunk_number}, total received: {len(self._temp_dirs[filename]['received_chunks'])}/{total_chunks}"
+         )
+
+     def _are_all_chunks_received(self, filename: str, total_chunks: int) -> bool:
+         """Check if all chunks for a file have been received."""
+         if filename not in self._temp_dirs:
+             print(f"DEBUG: No temp dir found for {filename}")
+             return False
+
+         received_chunks = self._temp_dirs[filename]["received_chunks"]
+         is_complete = len(received_chunks) == total_chunks
+         print(
+             f"DEBUG: Checking completion for {filename}: {len(received_chunks)}/{total_chunks} chunks received, complete: {is_complete}"
+         )
+         return is_complete
+
+     def _reconstruct_file(
+         self, filename: str, total_chunks: int, notebook_dir: str
+     ) -> None:
+         """Reconstruct the final file from all chunks and clean up temporary directory."""
+         print(f"DEBUG: Starting reconstruction for {filename}")
+
+         if filename not in self._temp_dirs:
+             raise ValueError(f"No temporary directory found for file: {filename}")
+
+         temp_dir = self._temp_dirs[filename]["temp_dir"]
+         file_path = os.path.join(notebook_dir, filename)
+
+         print(f"DEBUG: Reconstructing from {temp_dir} to {file_path}")
+
+         try:
+             # Reconstruct the file from chunks
+             with open(file_path, "wb") as final_file:
+                 for i in range(1, total_chunks + 1):
+                     chunk_filename = os.path.join(temp_dir, f"chunk_{i}")
+                     print(f"DEBUG: Reading chunk {i} from {chunk_filename}")
+                     with open(chunk_filename, "rb") as chunk_file:
+                         chunk_data = chunk_file.read()
+                         final_file.write(chunk_data)
+                         print(f"DEBUG: Wrote {len(chunk_data)} bytes from chunk {i}")
+
+             print(f"DEBUG: Successfully reconstructed {filename}")
+         finally:
+             # Clean up the temporary directory
+             print(f"DEBUG: Cleaning up temp dir for {filename}")
+             self._cleanup_temp_dir(filename)
+
+     def _cleanup_temp_dir(self, filename: str) -> None:
+         """Clean up the temporary directory for a file."""
+         if filename in self._temp_dirs:
+             temp_dir = self._temp_dirs[filename]["temp_dir"]
+             try:
+                 import shutil
+
+                 shutil.rmtree(temp_dir)
+             except Exception as e:
+                 # Log the error but don't fail the upload
+                 print(
+                     f"Warning: Failed to clean up temporary directory {temp_dir}: {e}"
+                 )
+             finally:
+                 # Remove from tracking dictionary
+                 del self._temp_dirs[filename]
+
+     def _send_chunk_complete_response(self, filename: str, notebook_dir: str) -> None:
+         """Send response indicating all chunks have been processed and file is complete."""
+         file_path = os.path.join(notebook_dir, filename)
+         self.write(
+             {
+                 "success": True,
+                 "filename": filename,
+                 "path": file_path,
+                 "chunk_complete": True,
+             }
+         )
+
+     def _send_chunk_received_response(
+         self, chunk_number: int, total_chunks: int
+     ) -> None:
+         """Send response indicating a chunk was received but file is not yet complete."""
+         self.write(
+             {
+                 "success": True,
+                 "chunk_received": True,
+                 "chunk_number": chunk_number,
+                 "total_chunks": total_chunks,
+             }
+         )
+
+     def _handle_error(self, error_message: str, status_code: int = 500) -> None:
+         """Handle errors and send appropriate error response."""
+         log_file_upload_failure(error_message)
+         self.set_status(status_code)
+         self.write({"error": error_message})
+         self.finish()
+
+     def on_finish(self) -> None:
+         """Clean up any remaining temporary directories when the handler is finished."""
+         super().on_finish()
+         # Note: We don't clean up here anymore since we want to preserve state across requests
+         # The cleanup happens when the file is fully reconstructed
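
For reference, a rough client-side sketch of how a chunked upload against this handler could look. It assumes the requests library, token auth via the Authorization header, and a server mounted at the default base URL; the URL, token handling, and chunk size here are illustrative, not taken from the package:

import os
import requests

def upload_in_chunks(path, url, token, notebook_dir=".", chunk_size=5 * 1024 * 1024):
    """Send a file to the upload endpoint in numbered chunks (1-based)."""
    filename = os.path.basename(path)
    total_chunks = max(1, -(-os.path.getsize(path) // chunk_size))  # ceiling division
    headers = {"Authorization": f"token {token}"}

    with open(path, "rb") as f:
        for chunk_number in range(1, total_chunks + 1):
            chunk = f.read(chunk_size)
            response = requests.post(
                url,
                headers=headers,
                data={
                    "notebook_dir": notebook_dir,
                    "chunk_number": str(chunk_number),
                    "total_chunks": str(total_chunks),
                },
                files={"file": (filename, chunk)},
            )
            response.raise_for_status()

    # The final response should include {"chunk_complete": True, "path": ...}
    return response.json()

# e.g. upload_in_chunks("data.csv", "http://localhost:8888/mito-ai/upload", token="<jupyter token>")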
mito_ai/file_uploads/urls.py ADDED
@@ -0,0 +1,21 @@
+ # Copyright (c) Saga Inc.
+ # Distributed under the terms of the GNU Affero General Public License v3.0 License.
+
+ from typing import List, Tuple, Any
+ from jupyter_server.utils import url_path_join
+ from mito_ai.file_uploads.handlers import FileUploadHandler
+
+
+ def get_file_uploads_urls(base_url: str) -> List[Tuple[str, Any, dict]]:
+     """Get all file uploads related URL patterns.
+
+     Args:
+         base_url: The base URL for the Jupyter server
+
+     Returns:
+         List of (url_pattern, handler_class, handler_kwargs) tuples
+     """
+     BASE_URL = base_url + "/mito-ai"
+     return [
+         (url_path_join(BASE_URL, "upload"), FileUploadHandler, {}),
+     ]
mito_ai/openai_client.py CHANGED
@@ -35,7 +35,7 @@ from mito_ai.utils.telemetry_utils import (
      USER_KEY,
  )

- OPENAI_MODEL_FALLBACK = "gpt-5"
+ OPENAI_MODEL_FALLBACK = "gpt-4.1"

  class OpenAIClient(LoggingConfigurable):
      """Provide AI feature through OpenAI services."""
mito_ai/tests/file_uploads/__init__.py ADDED
@@ -0,0 +1,2 @@
+ # Copyright (c) Saga Inc.
+ # Distributed under the terms of the GNU Affero General Public License v3.0 License.