portacode 0.3.19.dev4__py3-none-any.whl → 1.4.11.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of portacode might be problematic; see the registry's advisory page for more details.

Files changed (92)
  1. portacode/_version.py +16 -3
  2. portacode/cli.py +143 -17
  3. portacode/connection/client.py +149 -10
  4. portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +824 -21
  5. portacode/connection/handlers/__init__.py +28 -1
  6. portacode/connection/handlers/base.py +78 -16
  7. portacode/connection/handlers/chunked_content.py +244 -0
  8. portacode/connection/handlers/diff_handlers.py +603 -0
  9. portacode/connection/handlers/file_handlers.py +902 -17
  10. portacode/connection/handlers/project_aware_file_handlers.py +226 -0
  11. portacode/connection/handlers/project_state/README.md +312 -0
  12. portacode/connection/handlers/project_state/__init__.py +92 -0
  13. portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
  14. portacode/connection/handlers/project_state/git_manager.py +1502 -0
  15. portacode/connection/handlers/project_state/handlers.py +875 -0
  16. portacode/connection/handlers/project_state/manager.py +1331 -0
  17. portacode/connection/handlers/project_state/models.py +108 -0
  18. portacode/connection/handlers/project_state/utils.py +50 -0
  19. portacode/connection/handlers/project_state_handlers.py +45 -2185
  20. portacode/connection/handlers/proxmox_infra.py +361 -0
  21. portacode/connection/handlers/registry.py +15 -4
  22. portacode/connection/handlers/session.py +483 -32
  23. portacode/connection/handlers/system_handlers.py +147 -8
  24. portacode/connection/handlers/tab_factory.py +53 -46
  25. portacode/connection/handlers/terminal_handlers.py +21 -8
  26. portacode/connection/handlers/update_handler.py +61 -0
  27. portacode/connection/multiplex.py +60 -2
  28. portacode/connection/terminal.py +214 -24
  29. portacode/keypair.py +63 -1
  30. portacode/link_capture/__init__.py +38 -0
  31. portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
  32. portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
  33. portacode/link_capture/bin/elinks +3 -0
  34. portacode/link_capture/bin/gio-open +3 -0
  35. portacode/link_capture/bin/gnome-open +3 -0
  36. portacode/link_capture/bin/gvfs-open +3 -0
  37. portacode/link_capture/bin/kde-open +3 -0
  38. portacode/link_capture/bin/kfmclient +3 -0
  39. portacode/link_capture/bin/link_capture_exec.sh +11 -0
  40. portacode/link_capture/bin/link_capture_wrapper.py +75 -0
  41. portacode/link_capture/bin/links +3 -0
  42. portacode/link_capture/bin/links2 +3 -0
  43. portacode/link_capture/bin/lynx +3 -0
  44. portacode/link_capture/bin/mate-open +3 -0
  45. portacode/link_capture/bin/netsurf +3 -0
  46. portacode/link_capture/bin/sensible-browser +3 -0
  47. portacode/link_capture/bin/w3m +3 -0
  48. portacode/link_capture/bin/x-www-browser +3 -0
  49. portacode/link_capture/bin/xdg-open +3 -0
  50. portacode/logging_categories.py +140 -0
  51. portacode/pairing.py +103 -0
  52. portacode/static/js/test-ntp-clock.html +63 -0
  53. portacode/static/js/utils/ntp-clock.js +232 -0
  54. portacode/utils/NTP_ARCHITECTURE.md +136 -0
  55. portacode/utils/__init__.py +1 -0
  56. portacode/utils/diff_apply.py +456 -0
  57. portacode/utils/diff_renderer.py +371 -0
  58. portacode/utils/ntp_clock.py +65 -0
  59. portacode-1.4.11.dev1.dist-info/METADATA +298 -0
  60. portacode-1.4.11.dev1.dist-info/RECORD +97 -0
  61. {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/WHEEL +1 -1
  62. portacode-1.4.11.dev1.dist-info/top_level.txt +3 -0
  63. test_modules/README.md +296 -0
  64. test_modules/__init__.py +1 -0
  65. test_modules/test_device_online.py +44 -0
  66. test_modules/test_file_operations.py +743 -0
  67. test_modules/test_git_status_ui.py +370 -0
  68. test_modules/test_login_flow.py +50 -0
  69. test_modules/test_navigate_testing_folder.py +361 -0
  70. test_modules/test_play_store_screenshots.py +294 -0
  71. test_modules/test_terminal_buffer_performance.py +261 -0
  72. test_modules/test_terminal_interaction.py +80 -0
  73. test_modules/test_terminal_loading_race_condition.py +95 -0
  74. test_modules/test_terminal_start.py +56 -0
  75. testing_framework/.env.example +21 -0
  76. testing_framework/README.md +334 -0
  77. testing_framework/__init__.py +17 -0
  78. testing_framework/cli.py +326 -0
  79. testing_framework/core/__init__.py +1 -0
  80. testing_framework/core/base_test.py +336 -0
  81. testing_framework/core/cli_manager.py +177 -0
  82. testing_framework/core/hierarchical_runner.py +577 -0
  83. testing_framework/core/playwright_manager.py +520 -0
  84. testing_framework/core/runner.py +447 -0
  85. testing_framework/core/shared_cli_manager.py +234 -0
  86. testing_framework/core/test_discovery.py +112 -0
  87. testing_framework/requirements.txt +12 -0
  88. portacode-0.3.19.dev4.dist-info/METADATA +0 -241
  89. portacode-0.3.19.dev4.dist-info/RECORD +0 -30
  90. portacode-0.3.19.dev4.dist-info/top_level.txt +0 -1
  91. {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info}/entry_points.txt +0 -0
  92. {portacode-0.3.19.dev4.dist-info → portacode-1.4.11.dev1.dist-info/licenses}/LICENSE +0 -0
@@ -20,7 +20,13 @@ from .file_handlers import (
20
20
  DirectoryListHandler,
21
21
  FileInfoHandler,
22
22
  FileDeleteHandler,
23
+ FileCreateHandler,
24
+ FolderCreateHandler,
25
+ FileRenameHandler,
26
+ FileSearchHandler,
27
+ ContentRequestHandler,
23
28
  )
29
+ from .diff_handlers import FileApplyDiffHandler, FilePreviewDiffHandler
24
30
  from .project_state_handlers import (
25
31
  ProjectStateFolderExpandHandler,
26
32
  ProjectStateFolderCollapseHandler,
@@ -28,7 +34,13 @@ from .project_state_handlers import (
28
34
  ProjectStateTabCloseHandler,
29
35
  ProjectStateSetActiveTabHandler,
30
36
  ProjectStateDiffOpenHandler,
37
+ ProjectStateDiffContentHandler,
38
+ ProjectStateGitStageHandler,
39
+ ProjectStateGitUnstageHandler,
40
+ ProjectStateGitRevertHandler,
41
+ ProjectStateGitCommitHandler,
31
42
  )
43
+ from .proxmox_infra import ConfigureProxmoxInfraHandler, RevertProxmoxInfraHandler
32
44
 
33
45
  __all__ = [
34
46
  "BaseHandler",
@@ -40,12 +52,20 @@ __all__ = [
40
52
  "TerminalStopHandler",
41
53
  "TerminalListHandler",
42
54
  "SystemInfoHandler",
55
+ "ConfigureProxmoxInfraHandler",
43
56
  # File operation handlers (optional - register as needed)
44
57
  "FileReadHandler",
45
58
  "FileWriteHandler",
46
59
  "DirectoryListHandler",
47
60
  "FileInfoHandler",
48
61
  "FileDeleteHandler",
62
+ "FileCreateHandler",
63
+ "FolderCreateHandler",
64
+ "FileRenameHandler",
65
+ "FileSearchHandler",
66
+ "ContentRequestHandler",
67
+ "FileApplyDiffHandler",
68
+ "FilePreviewDiffHandler",
49
69
  # Project state handlers
50
70
  "ProjectStateFolderExpandHandler",
51
71
  "ProjectStateFolderCollapseHandler",
@@ -53,4 +73,11 @@ __all__ = [
53
73
  "ProjectStateTabCloseHandler",
54
74
  "ProjectStateSetActiveTabHandler",
55
75
  "ProjectStateDiffOpenHandler",
56
- ]
76
+ "ProjectStateDiffContentHandler",
77
+ "ProjectStateGitStageHandler",
78
+ "ProjectStateGitUnstageHandler",
79
+ "ProjectStateGitRevertHandler",
80
+ "ProjectStateGitCommitHandler",
81
+ "UpdatePortacodeHandler",
82
+ "RevertProxmoxInfraHandler",
83
+ ]
@@ -4,6 +4,7 @@ import asyncio
4
4
  import logging
5
5
  from abc import ABC, abstractmethod
6
6
  from typing import Any, Dict, Optional, TYPE_CHECKING
7
+ from portacode.utils.ntp_clock import ntp_clock
7
8
 
8
9
  if TYPE_CHECKING:
9
10
  from ..multiplex import Channel
@@ -42,35 +43,45 @@ class BaseHandler(ABC):
42
43
 
43
44
  async def send_response(self, payload: Dict[str, Any], reply_channel: Optional[str] = None, project_id: str = None) -> None:
44
45
  """Send a response back to the gateway with client session awareness.
45
-
46
+
46
47
  Args:
47
48
  payload: Response payload
48
49
  reply_channel: Optional reply channel for backward compatibility
49
50
  project_id: Optional project filter for targeting specific sessions
50
51
  """
52
+ # Add device_send timestamp if trace present
53
+ if "trace" in payload and "request_id" in payload:
54
+ device_send_time = ntp_clock.now_ms()
55
+ if device_send_time is not None:
56
+ payload["trace"]["device_send"] = device_send_time
57
+ # Update ping to show total time from client_send
58
+ if "client_send" in payload["trace"]:
59
+ payload["trace"]["ping"] = device_send_time - payload["trace"]["client_send"]
60
+ logger.info(f"📤 Device sending traced response: {payload['request_id']}")
61
+
51
62
  # Get client session manager from context
52
63
  client_session_manager = self.context.get("client_session_manager")
53
-
64
+
54
65
  if client_session_manager and client_session_manager.has_interested_clients():
55
66
  # Get target sessions
56
67
  target_sessions = client_session_manager.get_target_sessions(project_id)
57
68
  if not target_sessions:
58
69
  logger.debug("handler: No target sessions found, skipping response send")
59
70
  return
60
-
71
+
61
72
  # Add session targeting information
62
73
  enhanced_payload = dict(payload)
63
74
  enhanced_payload["client_sessions"] = target_sessions
64
-
75
+
65
76
  # Add backward compatibility reply_channel (first session if not provided)
66
77
  if not reply_channel:
67
78
  reply_channel = client_session_manager.get_reply_channel_for_compatibility()
68
79
  if reply_channel:
69
80
  enhanced_payload["reply_channel"] = reply_channel
70
-
71
- logger.debug("handler: Sending response to %d client sessions: %s",
81
+
82
+ logger.debug("handler: Sending response to %d client sessions: %s",
72
83
  len(target_sessions), target_sessions)
73
-
84
+
74
85
  await self.control_channel.send(enhanced_payload)
75
86
  else:
76
87
  # Fallback to original behavior if no client session manager or no clients
@@ -107,18 +118,47 @@ class AsyncHandler(BaseHandler):
107
118
 
108
119
  async def handle(self, message: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
109
120
  """Handle the command by executing it and sending the response."""
110
- logger.info("handler: Processing command %s with reply_channel=%s",
121
+ logger.info("handler: Processing command %s with reply_channel=%s",
111
122
  self.command_name, reply_channel)
112
-
123
+
124
+ # Add handler_dispatch timestamp if trace present
125
+ if "trace" in message and "request_id" in message:
126
+ handler_dispatch_time = ntp_clock.now_ms()
127
+ if handler_dispatch_time is not None:
128
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
129
+ # Update ping to show total time from client_send
130
+ if "client_send" in message["trace"]:
131
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
132
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
133
+
113
134
  try:
114
135
  response = await self.execute(message)
115
136
  logger.info("handler: Command %s executed successfully", self.command_name)
116
-
117
- # Extract project_id from response for session targeting
118
- project_id = response.get("project_id")
119
- logger.info("handler: %s response project_id=%s, response=%s",
120
- self.command_name, project_id, response)
121
- await self.send_response(response, reply_channel, project_id)
137
+
138
+ # Handle cases where execute() sends responses directly and returns None
139
+ if response is not None:
140
+ # Automatically copy request_id if present in the incoming message
141
+ if "request_id" in message and "request_id" not in response:
142
+ response["request_id"] = message["request_id"]
143
+
144
+ # Pass through trace from request to response (add to existing trace, don't create new one)
145
+ if "trace" in message and "request_id" in message:
146
+ response["trace"] = dict(message["trace"])
147
+ handler_complete_time = ntp_clock.now_ms()
148
+ if handler_complete_time is not None:
149
+ response["trace"]["handler_complete"] = handler_complete_time
150
+ # Update ping to show total time from client_send
151
+ if "client_send" in response["trace"]:
152
+ response["trace"]["ping"] = handler_complete_time - response["trace"]["client_send"]
153
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
154
+
155
+ # Extract project_id from response for session targeting
156
+ project_id = response.get("project_id")
157
+ logger.info("handler: %s response project_id=%s, response=%s",
158
+ self.command_name, project_id, response)
159
+ await self.send_response(response, reply_channel, project_id)
160
+ else:
161
+ logger.info("handler: %s handled response transmission directly", self.command_name)
122
162
  except Exception as exc:
123
163
  logger.exception("handler: Error in async handler %s: %s", self.command_name, exc)
124
164
  # Extract project_id from original message for error targeting
@@ -143,10 +183,32 @@ class SyncHandler(BaseHandler):
143
183
 
144
184
  async def handle(self, message: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
145
185
  """Handle the command by executing it in an executor and sending the response."""
186
+ # Add handler_dispatch timestamp if trace present
187
+ if "trace" in message and "request_id" in message:
188
+ handler_dispatch_time = ntp_clock.now_ms()
189
+ if handler_dispatch_time is not None:
190
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
191
+ # Update ping to show total time from client_send
192
+ if "client_send" in message["trace"]:
193
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
194
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
195
+
146
196
  try:
147
197
  loop = asyncio.get_running_loop()
148
198
  response = await loop.run_in_executor(None, self.execute, message)
149
-
199
+
200
+ # Automatically copy request_id if present in the incoming message
201
+ if "request_id" in message and "request_id" not in response:
202
+ response["request_id"] = message["request_id"]
203
+
204
+ # Pass through trace from request to response (add to existing trace, don't create new one)
205
+ if "trace" in message and "request_id" in message:
206
+ response["trace"] = dict(message["trace"])
207
+ handler_complete_time = ntp_clock.now_ms()
208
+ if handler_complete_time is not None:
209
+ response["trace"]["handler_complete"] = handler_complete_time
210
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
211
+
150
212
  # Extract project_id from response for session targeting
151
213
  project_id = response.get("project_id")
152
214
  await self.send_response(response, reply_channel, project_id)
@@ -0,0 +1,244 @@
1
+ """
2
+ Chunked content transfer utilities for handling large content over WebSocket.
3
+
4
+ This module provides functionality to split large content into chunks for reliable
5
+ transmission over WebSocket connections, and to reassemble chunks on the client side.
6
+ """
7
+
8
+ import hashlib
9
+ import uuid
10
+ from typing import Dict, Any, List, Optional
11
+ import logging
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
# Threshold above which content is split into chunks (200 KB of UTF-8 bytes).
MAX_CONTENT_SIZE = 200 * 1024  # 200KB

# Size of each individual chunk; 64 KB keeps WebSocket frames reliable.
CHUNK_SIZE = 64 * 1024  # 64KB


def should_chunk_content(content: str) -> bool:
    """Return True when *content* exceeds MAX_CONTENT_SIZE once UTF-8 encoded.

    None is treated as "nothing to send" and is never chunked.
    """
    if content is None:
        return False
    return len(content.encode('utf-8')) > MAX_CONTENT_SIZE
29
+
30
+
31
def calculate_content_hash(content: str) -> str:
    """Return the SHA-256 hex digest of *content* (UTF-8) for verification.

    None yields the empty string, mirroring should_chunk_content's treatment
    of None as "no content".
    """
    if content is None:
        return ""
    return hashlib.sha256(content.encode('utf-8')).hexdigest()
38
+
39
+
40
def split_content_into_chunks(content: str, transfer_id: Optional[str] = None,
                              chunk_size: Optional[int] = None) -> List[Dict[str, Any]]:
    """
    Split content into chunks for transmission.

    Args:
        content: The content to split
        transfer_id: Optional transfer ID, will generate one if not provided
        chunk_size: Optional chunk size in bytes; defaults to CHUNK_SIZE

    Returns:
        List of chunk dictionaries ready for transmission
    """
    if content is None:
        return []

    if transfer_id is None:
        transfer_id = str(uuid.uuid4())

    # Resolve the default lazily so an explicit chunk_size never reads CHUNK_SIZE.
    size = CHUNK_SIZE if chunk_size is None else chunk_size

    content_bytes = content.encode('utf-8')
    total_size = len(content_bytes)
    content_hash = hashlib.sha256(content_bytes).hexdigest()

    # First pass: compute byte segments, never ending a segment in the middle
    # of a multi-byte UTF-8 sequence. A fixed-offset slice could split a
    # character and make .decode('utf-8') raise UnicodeDecodeError.
    # UTF-8 continuation bytes match the bit pattern 0b10xxxxxx.
    segments: List[bytes] = []
    offset = 0
    while offset < total_size:
        end = min(offset + size, total_size)
        while end < total_size and end > offset and (content_bytes[end] & 0xC0) == 0x80:
            end -= 1
        if end == offset:
            # chunk size smaller than one character: extend forward to cover it.
            end = offset + 1
            while end < total_size and (content_bytes[end] & 0xC0) == 0x80:
                end += 1
        segments.append(content_bytes[offset:end])
        offset = end

    # Second pass: build the wire-format dicts. chunk_count is the actual number
    # of segments — boundary back-off can make it differ from a pure ceiling
    # division of total_size by the chunk size.
    chunk_count = len(segments)
    chunks: List[Dict[str, Any]] = []
    for index, segment in enumerate(segments):
        chunks.append({
            "transfer_id": transfer_id,
            "chunk_index": index,
            "chunk_count": chunk_count,
            "chunk_size": len(segment),
            "total_size": total_size,
            "content_hash": content_hash,
            "chunk_hash": hashlib.sha256(segment).hexdigest(),
            "chunk_content": segment.decode('utf-8'),
            "is_final_chunk": index == chunk_count - 1,
        })

    logger.info(f"Split content into {len(chunks)} chunks (total size: {total_size} bytes, transfer_id: {transfer_id})")
    return chunks
87
+
88
+
89
def create_chunked_response(base_response: Dict[str, Any], content_field: str, content: str) -> List[Dict[str, Any]]:
    """
    Create chunked response messages from a base response and content.

    Args:
        base_response: The base response dictionary
        content_field: The field name where content should be placed
        content: The content to chunk

    Returns:
        List of response dictionaries with chunked content
    """
    # Small payloads go out as a single, unchunked response.
    if not should_chunk_content(content):
        single = base_response.copy()
        single[content_field] = content
        single["chunked"] = False
        return [single]

    transfer_id = str(uuid.uuid4())
    responses = []
    for chunk in split_content_into_chunks(content, transfer_id):
        message = base_response.copy()
        message["chunked"] = True
        # Copy the transfer metadata the receiving ChunkAssembler needs.
        for key in ("transfer_id", "chunk_index", "chunk_count", "chunk_size",
                    "total_size", "content_hash", "chunk_hash", "is_final_chunk"):
            message[key] = chunk[key]
        message[content_field] = chunk["chunk_content"]
        responses.append(message)

    logger.info(f"Created chunked response with {len(responses)} chunks for transfer_id: {transfer_id}")
    return responses
130
+
131
+
132
class ChunkAssembler:
    """
    Helper class to assemble chunked content on the receiving side.

    Chunks may arrive in any order; duplicates (e.g. retransmits) are ignored.
    Per-chunk and whole-content SHA-256 hashes are verified before the
    assembled string is returned.
    """

    def __init__(self):
        # transfer_id -> {chunk_count, total_size, content_hash, chunks,
        #                 received_chunks, start_time}
        self.transfers: Dict[str, Dict[str, Any]] = {}

    def add_chunk(self, chunk_data: Dict[str, Any], content_field: str) -> Optional[str]:
        """
        Add a chunk to the assembler.

        Args:
            chunk_data: The chunk data dictionary
            content_field: The field name containing the chunk content

        Returns:
            Complete content if all chunks received, None if more chunks needed

        Raises:
            ValueError: If chunk data is invalid or verification fails
        """
        import time

        transfer_id = chunk_data.get("transfer_id")
        chunk_index = chunk_data.get("chunk_index")
        chunk_count = chunk_data.get("chunk_count")
        chunk_size = chunk_data.get("chunk_size")
        total_size = chunk_data.get("total_size")
        content_hash = chunk_data.get("content_hash")
        chunk_hash = chunk_data.get("chunk_hash")
        chunk_content = chunk_data.get(content_field)

        if not all([transfer_id, chunk_index is not None, chunk_count, chunk_size,
                    total_size, content_hash, chunk_hash, chunk_content is not None]):
            raise ValueError("Missing required chunk fields")

        # Verify this chunk's declared size and hash before accepting it.
        chunk_bytes = chunk_content.encode('utf-8')
        if len(chunk_bytes) != chunk_size:
            raise ValueError(f"Chunk size mismatch: expected {chunk_size}, got {len(chunk_bytes)}")

        calculated_chunk_hash = hashlib.sha256(chunk_bytes).hexdigest()
        if calculated_chunk_hash != chunk_hash:
            raise ValueError(f"Chunk hash mismatch: expected {chunk_hash}, got {calculated_chunk_hash}")

        # First chunk of a transfer: record its metadata plus a creation
        # timestamp so cleanup_stale_transfers can expire it on the very first
        # sweep (previously the timestamp was only set during cleanup itself,
        # so a stale transfer survived one full sweep).
        if transfer_id not in self.transfers:
            self.transfers[transfer_id] = {
                "chunk_count": chunk_count,
                "total_size": total_size,
                "content_hash": content_hash,
                "chunks": {},
                "received_chunks": 0,
                "start_time": time.time(),
            }

        transfer = self.transfers[transfer_id]

        # Every chunk of one transfer must agree on the transfer-level metadata.
        if (transfer["chunk_count"] != chunk_count or
                transfer["total_size"] != total_size or
                transfer["content_hash"] != content_hash):
            raise ValueError("Transfer metadata mismatch")

        # Store the chunk; duplicates do not bump the received counter.
        if chunk_index not in transfer["chunks"]:
            transfer["chunks"][chunk_index] = chunk_content
            transfer["received_chunks"] += 1

        logger.debug(f"Received chunk {chunk_index + 1}/{chunk_count} for transfer {transfer_id}")

        if transfer["received_chunks"] != chunk_count:
            return None  # More chunks needed

        # Assemble in index order; join avoids quadratic string concatenation.
        parts = []
        for i in range(chunk_count):
            if i not in transfer["chunks"]:
                raise ValueError(f"Missing chunk {i} for transfer {transfer_id}")
            parts.append(transfer["chunks"][i])
        assembled_content = "".join(parts)

        # Verify the reassembled content against the transfer-level size/hash.
        assembled_bytes = assembled_content.encode('utf-8')
        if len(assembled_bytes) != total_size:
            raise ValueError(f"Final content size mismatch: expected {total_size}, got {len(assembled_bytes)}")

        calculated_hash = hashlib.sha256(assembled_bytes).hexdigest()
        if calculated_hash != content_hash:
            raise ValueError(f"Final content hash mismatch: expected {content_hash}, got {calculated_hash}")

        # Completed transfers are removed immediately to free memory.
        del self.transfers[transfer_id]

        logger.info(f"Successfully assembled content from {chunk_count} chunks (transfer_id: {transfer_id}, size: {total_size} bytes)")
        return assembled_content

    def cleanup_stale_transfers(self, max_age_seconds: int = 300):
        """Clean up transfers older than max_age_seconds."""
        import time
        current_time = time.time()

        stale_transfers = [
            transfer_id
            for transfer_id, transfer in self.transfers.items()
            # Entries created before start_time existed get stamped now and
            # therefore survive exactly one more sweep.
            if current_time - transfer.setdefault("start_time", current_time) > max_age_seconds
        ]

        for transfer_id in stale_transfers:
            logger.warning(f"Cleaning up stale transfer: {transfer_id}")
            del self.transfers[transfer_id]