portacode 0.3.4.dev0__py3-none-any.whl → 1.4.11.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of portacode might be problematic. Click here for more details.

Files changed (93)
  1. portacode/_version.py +16 -3
  2. portacode/cli.py +155 -19
  3. portacode/connection/client.py +152 -12
  4. portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +1577 -0
  5. portacode/connection/handlers/__init__.py +43 -1
  6. portacode/connection/handlers/base.py +122 -18
  7. portacode/connection/handlers/chunked_content.py +244 -0
  8. portacode/connection/handlers/diff_handlers.py +603 -0
  9. portacode/connection/handlers/file_handlers.py +902 -17
  10. portacode/connection/handlers/project_aware_file_handlers.py +226 -0
  11. portacode/connection/handlers/project_state/README.md +312 -0
  12. portacode/connection/handlers/project_state/__init__.py +92 -0
  13. portacode/connection/handlers/project_state/file_system_watcher.py +179 -0
  14. portacode/connection/handlers/project_state/git_manager.py +1502 -0
  15. portacode/connection/handlers/project_state/handlers.py +875 -0
  16. portacode/connection/handlers/project_state/manager.py +1331 -0
  17. portacode/connection/handlers/project_state/models.py +108 -0
  18. portacode/connection/handlers/project_state/utils.py +50 -0
  19. portacode/connection/handlers/project_state_handlers.py +45 -0
  20. portacode/connection/handlers/proxmox_infra.py +307 -0
  21. portacode/connection/handlers/registry.py +53 -10
  22. portacode/connection/handlers/session.py +705 -53
  23. portacode/connection/handlers/system_handlers.py +142 -8
  24. portacode/connection/handlers/tab_factory.py +389 -0
  25. portacode/connection/handlers/terminal_handlers.py +150 -11
  26. portacode/connection/handlers/update_handler.py +61 -0
  27. portacode/connection/multiplex.py +60 -2
  28. portacode/connection/terminal.py +695 -28
  29. portacode/keypair.py +63 -1
  30. portacode/link_capture/__init__.py +38 -0
  31. portacode/link_capture/__pycache__/__init__.cpython-311.pyc +0 -0
  32. portacode/link_capture/bin/__pycache__/link_capture_wrapper.cpython-311.pyc +0 -0
  33. portacode/link_capture/bin/elinks +3 -0
  34. portacode/link_capture/bin/gio-open +3 -0
  35. portacode/link_capture/bin/gnome-open +3 -0
  36. portacode/link_capture/bin/gvfs-open +3 -0
  37. portacode/link_capture/bin/kde-open +3 -0
  38. portacode/link_capture/bin/kfmclient +3 -0
  39. portacode/link_capture/bin/link_capture_exec.sh +11 -0
  40. portacode/link_capture/bin/link_capture_wrapper.py +75 -0
  41. portacode/link_capture/bin/links +3 -0
  42. portacode/link_capture/bin/links2 +3 -0
  43. portacode/link_capture/bin/lynx +3 -0
  44. portacode/link_capture/bin/mate-open +3 -0
  45. portacode/link_capture/bin/netsurf +3 -0
  46. portacode/link_capture/bin/sensible-browser +3 -0
  47. portacode/link_capture/bin/w3m +3 -0
  48. portacode/link_capture/bin/x-www-browser +3 -0
  49. portacode/link_capture/bin/xdg-open +3 -0
  50. portacode/logging_categories.py +140 -0
  51. portacode/pairing.py +103 -0
  52. portacode/service.py +6 -0
  53. portacode/static/js/test-ntp-clock.html +63 -0
  54. portacode/static/js/utils/ntp-clock.js +232 -0
  55. portacode/utils/NTP_ARCHITECTURE.md +136 -0
  56. portacode/utils/__init__.py +1 -0
  57. portacode/utils/diff_apply.py +456 -0
  58. portacode/utils/diff_renderer.py +371 -0
  59. portacode/utils/ntp_clock.py +65 -0
  60. portacode-1.4.11.dev0.dist-info/METADATA +298 -0
  61. portacode-1.4.11.dev0.dist-info/RECORD +97 -0
  62. {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info}/WHEEL +1 -1
  63. portacode-1.4.11.dev0.dist-info/top_level.txt +3 -0
  64. test_modules/README.md +296 -0
  65. test_modules/__init__.py +1 -0
  66. test_modules/test_device_online.py +44 -0
  67. test_modules/test_file_operations.py +743 -0
  68. test_modules/test_git_status_ui.py +370 -0
  69. test_modules/test_login_flow.py +50 -0
  70. test_modules/test_navigate_testing_folder.py +361 -0
  71. test_modules/test_play_store_screenshots.py +294 -0
  72. test_modules/test_terminal_buffer_performance.py +261 -0
  73. test_modules/test_terminal_interaction.py +80 -0
  74. test_modules/test_terminal_loading_race_condition.py +95 -0
  75. test_modules/test_terminal_start.py +56 -0
  76. testing_framework/.env.example +21 -0
  77. testing_framework/README.md +334 -0
  78. testing_framework/__init__.py +17 -0
  79. testing_framework/cli.py +326 -0
  80. testing_framework/core/__init__.py +1 -0
  81. testing_framework/core/base_test.py +336 -0
  82. testing_framework/core/cli_manager.py +177 -0
  83. testing_framework/core/hierarchical_runner.py +577 -0
  84. testing_framework/core/playwright_manager.py +520 -0
  85. testing_framework/core/runner.py +447 -0
  86. testing_framework/core/shared_cli_manager.py +234 -0
  87. testing_framework/core/test_discovery.py +112 -0
  88. testing_framework/requirements.txt +12 -0
  89. portacode-0.3.4.dev0.dist-info/METADATA +0 -236
  90. portacode-0.3.4.dev0.dist-info/RECORD +0 -27
  91. portacode-0.3.4.dev0.dist-info/top_level.txt +0 -1
  92. {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info}/entry_points.txt +0 -0
  93. {portacode-0.3.4.dev0.dist-info → portacode-1.4.11.dev0.dist-info/licenses}/LICENSE +0 -0
@@ -20,7 +20,28 @@ from .file_handlers import (
20
20
  DirectoryListHandler,
21
21
  FileInfoHandler,
22
22
  FileDeleteHandler,
23
+ FileCreateHandler,
24
+ FolderCreateHandler,
25
+ FileRenameHandler,
26
+ FileSearchHandler,
27
+ ContentRequestHandler,
23
28
  )
29
+ from .diff_handlers import FileApplyDiffHandler, FilePreviewDiffHandler
30
+ from .project_state_handlers import (
31
+ ProjectStateFolderExpandHandler,
32
+ ProjectStateFolderCollapseHandler,
33
+ ProjectStateFileOpenHandler,
34
+ ProjectStateTabCloseHandler,
35
+ ProjectStateSetActiveTabHandler,
36
+ ProjectStateDiffOpenHandler,
37
+ ProjectStateDiffContentHandler,
38
+ ProjectStateGitStageHandler,
39
+ ProjectStateGitUnstageHandler,
40
+ ProjectStateGitRevertHandler,
41
+ ProjectStateGitCommitHandler,
42
+ )
43
+ from .update_handler import UpdatePortacodeHandler
44
+ from .proxmox_infra import ConfigureProxmoxInfraHandler
24
45
 
25
46
  __all__ = [
26
47
  "BaseHandler",
@@ -32,10 +53,31 @@ __all__ = [
32
53
  "TerminalStopHandler",
33
54
  "TerminalListHandler",
34
55
  "SystemInfoHandler",
56
+ "ConfigureProxmoxInfraHandler",
35
57
  # File operation handlers (optional - register as needed)
36
58
  "FileReadHandler",
37
59
  "FileWriteHandler",
38
60
  "DirectoryListHandler",
39
61
  "FileInfoHandler",
40
62
  "FileDeleteHandler",
41
- ]
63
+ "FileCreateHandler",
64
+ "FolderCreateHandler",
65
+ "FileRenameHandler",
66
+ "FileSearchHandler",
67
+ "ContentRequestHandler",
68
+ "FileApplyDiffHandler",
69
+ "FilePreviewDiffHandler",
70
+ # Project state handlers
71
+ "ProjectStateFolderExpandHandler",
72
+ "ProjectStateFolderCollapseHandler",
73
+ "ProjectStateFileOpenHandler",
74
+ "ProjectStateTabCloseHandler",
75
+ "ProjectStateSetActiveTabHandler",
76
+ "ProjectStateDiffOpenHandler",
77
+ "ProjectStateDiffContentHandler",
78
+ "ProjectStateGitStageHandler",
79
+ "ProjectStateGitUnstageHandler",
80
+ "ProjectStateGitRevertHandler",
81
+ "ProjectStateGitCommitHandler",
82
+ "UpdatePortacodeHandler",
83
+ ]
@@ -4,6 +4,7 @@ import asyncio
4
4
  import logging
5
5
  from abc import ABC, abstractmethod
6
6
  from typing import Any, Dict, Optional, TYPE_CHECKING
7
+ from portacode.utils.ntp_clock import ntp_clock
7
8
 
8
9
  if TYPE_CHECKING:
9
10
  from ..multiplex import Channel
@@ -40,28 +41,64 @@ class BaseHandler(ABC):
40
41
  """
41
42
  pass
42
43
 
43
- async def send_response(self, payload: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
44
- """Send a response back to the gateway.
45
-
44
+ async def send_response(self, payload: Dict[str, Any], reply_channel: Optional[str] = None, project_id: str = None) -> None:
45
+ """Send a response back to the gateway with client session awareness.
46
+
46
47
  Args:
47
48
  payload: Response payload
48
- reply_channel: Optional reply channel
49
+ reply_channel: Optional reply channel for backward compatibility
50
+ project_id: Optional project filter for targeting specific sessions
49
51
  """
50
- if reply_channel:
51
- payload["reply_channel"] = reply_channel
52
- await self.control_channel.send(payload)
52
+ # Add device_send timestamp if trace present
53
+ if "trace" in payload and "request_id" in payload:
54
+ device_send_time = ntp_clock.now_ms()
55
+ if device_send_time is not None:
56
+ payload["trace"]["device_send"] = device_send_time
57
+ # Update ping to show total time from client_send
58
+ if "client_send" in payload["trace"]:
59
+ payload["trace"]["ping"] = device_send_time - payload["trace"]["client_send"]
60
+ logger.info(f"📤 Device sending traced response: {payload['request_id']}")
61
+
62
+ # Get client session manager from context
63
+ client_session_manager = self.context.get("client_session_manager")
64
+
65
+ if client_session_manager and client_session_manager.has_interested_clients():
66
+ # Get target sessions
67
+ target_sessions = client_session_manager.get_target_sessions(project_id)
68
+ if not target_sessions:
69
+ logger.debug("handler: No target sessions found, skipping response send")
70
+ return
71
+
72
+ # Add session targeting information
73
+ enhanced_payload = dict(payload)
74
+ enhanced_payload["client_sessions"] = target_sessions
75
+
76
+ # Add backward compatibility reply_channel (first session if not provided)
77
+ if not reply_channel:
78
+ reply_channel = client_session_manager.get_reply_channel_for_compatibility()
79
+ if reply_channel:
80
+ enhanced_payload["reply_channel"] = reply_channel
81
+
82
+ logger.debug("handler: Sending response to %d client sessions: %s",
83
+ len(target_sessions), target_sessions)
84
+
85
+ await self.control_channel.send(enhanced_payload)
86
+ else:
87
+ # Fallback to original behavior if no client session manager or no clients
88
+ if reply_channel:
89
+ payload["reply_channel"] = reply_channel
90
+ await self.control_channel.send(payload)
53
91
 
54
- async def send_error(self, message: str, reply_channel: Optional[str] = None) -> None:
55
- """Send an error response.
92
+ async def send_error(self, message: str, reply_channel: Optional[str] = None, project_id: str = None) -> None:
93
+ """Send an error response with client session awareness.
56
94
 
57
95
  Args:
58
96
  message: Error message
59
- reply_channel: Optional reply channel
97
+ reply_channel: Optional reply channel for backward compatibility
98
+ project_id: Optional project filter for targeting specific sessions
60
99
  """
61
100
  payload = {"event": "error", "message": message}
62
- if reply_channel:
63
- payload["reply_channel"] = reply_channel
64
- await self.control_channel.send(payload)
101
+ await self.send_response(payload, reply_channel, project_id)
65
102
 
66
103
 
67
104
  class AsyncHandler(BaseHandler):
@@ -81,12 +118,52 @@ class AsyncHandler(BaseHandler):
81
118
 
82
119
  async def handle(self, message: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
83
120
  """Handle the command by executing it and sending the response."""
121
+ logger.info("handler: Processing command %s with reply_channel=%s",
122
+ self.command_name, reply_channel)
123
+
124
+ # Add handler_dispatch timestamp if trace present
125
+ if "trace" in message and "request_id" in message:
126
+ handler_dispatch_time = ntp_clock.now_ms()
127
+ if handler_dispatch_time is not None:
128
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
129
+ # Update ping to show total time from client_send
130
+ if "client_send" in message["trace"]:
131
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
132
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
133
+
84
134
  try:
85
135
  response = await self.execute(message)
86
- await self.send_response(response, reply_channel)
136
+ logger.info("handler: Command %s executed successfully", self.command_name)
137
+
138
+ # Handle cases where execute() sends responses directly and returns None
139
+ if response is not None:
140
+ # Automatically copy request_id if present in the incoming message
141
+ if "request_id" in message and "request_id" not in response:
142
+ response["request_id"] = message["request_id"]
143
+
144
+ # Pass through trace from request to response (add to existing trace, don't create new one)
145
+ if "trace" in message and "request_id" in message:
146
+ response["trace"] = dict(message["trace"])
147
+ handler_complete_time = ntp_clock.now_ms()
148
+ if handler_complete_time is not None:
149
+ response["trace"]["handler_complete"] = handler_complete_time
150
+ # Update ping to show total time from client_send
151
+ if "client_send" in response["trace"]:
152
+ response["trace"]["ping"] = handler_complete_time - response["trace"]["client_send"]
153
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
154
+
155
+ # Extract project_id from response for session targeting
156
+ project_id = response.get("project_id")
157
+ logger.info("handler: %s response project_id=%s, response=%s",
158
+ self.command_name, project_id, response)
159
+ await self.send_response(response, reply_channel, project_id)
160
+ else:
161
+ logger.info("handler: %s handled response transmission directly", self.command_name)
87
162
  except Exception as exc:
88
- logger.exception("Error in async handler %s: %s", self.command_name, exc)
89
- await self.send_error(str(exc), reply_channel)
163
+ logger.exception("handler: Error in async handler %s: %s", self.command_name, exc)
164
+ # Extract project_id from original message for error targeting
165
+ project_id = message.get("project_id")
166
+ await self.send_error(str(exc), reply_channel, project_id)
90
167
 
91
168
 
92
169
  class SyncHandler(BaseHandler):
@@ -106,10 +183,37 @@ class SyncHandler(BaseHandler):
106
183
 
107
184
  async def handle(self, message: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
108
185
  """Handle the command by executing it in an executor and sending the response."""
186
+ # Add handler_dispatch timestamp if trace present
187
+ if "trace" in message and "request_id" in message:
188
+ handler_dispatch_time = ntp_clock.now_ms()
189
+ if handler_dispatch_time is not None:
190
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
191
+ # Update ping to show total time from client_send
192
+ if "client_send" in message["trace"]:
193
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
194
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
195
+
109
196
  try:
110
197
  loop = asyncio.get_running_loop()
111
198
  response = await loop.run_in_executor(None, self.execute, message)
112
- await self.send_response(response, reply_channel)
199
+
200
+ # Automatically copy request_id if present in the incoming message
201
+ if "request_id" in message and "request_id" not in response:
202
+ response["request_id"] = message["request_id"]
203
+
204
+ # Pass through trace from request to response (add to existing trace, don't create new one)
205
+ if "trace" in message and "request_id" in message:
206
+ response["trace"] = dict(message["trace"])
207
+ handler_complete_time = ntp_clock.now_ms()
208
+ if handler_complete_time is not None:
209
+ response["trace"]["handler_complete"] = handler_complete_time
210
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
211
+
212
+ # Extract project_id from response for session targeting
213
+ project_id = response.get("project_id")
214
+ await self.send_response(response, reply_channel, project_id)
113
215
  except Exception as exc:
114
216
  logger.exception("Error in sync handler %s: %s", self.command_name, exc)
115
- await self.send_error(str(exc), reply_channel)
217
+ # Extract project_id from original message for error targeting
218
+ project_id = message.get("project_id")
219
+ await self.send_error(str(exc), reply_channel, project_id)
@@ -0,0 +1,244 @@
1
+ """
2
+ Chunked content transfer utilities for handling large content over WebSocket.
3
+
4
+ This module provides functionality to split large content into chunks for reliable
5
+ transmission over WebSocket connections, and to reassemble chunks on the client side.
6
+ """
7
+
8
+ import hashlib
9
+ import uuid
10
+ from typing import Dict, Any, List, Optional
11
+ import logging
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+ # Maximum size for content before chunking (200KB)
16
+ MAX_CONTENT_SIZE = 200 * 1024 # 200KB
17
+
18
+ # Maximum chunk size (64KB per chunk for reliable WebSocket transmission)
19
+ CHUNK_SIZE = 64 * 1024 # 64KB
20
+
21
+
22
def should_chunk_content(content: str) -> bool:
    """Return True when *content*'s UTF-8 encoding exceeds MAX_CONTENT_SIZE.

    None is treated as "nothing to send" and is never chunked.
    """
    if content is None:
        return False
    return len(content.encode('utf-8')) > MAX_CONTENT_SIZE
29
+
30
+
31
def calculate_content_hash(content: str) -> str:
    """Return the hex SHA-256 digest of *content* for verification.

    None yields the empty string (there is nothing to hash).
    """
    if content is None:
        return ""
    return hashlib.sha256(content.encode('utf-8')).hexdigest()
38
+
39
+
40
def split_content_into_chunks(content: str, transfer_id: Optional[str] = None) -> List[Dict[str, Any]]:
    """
    Split content into chunks for transmission.

    Chunk boundaries are aligned to UTF-8 character boundaries. The previous
    implementation sliced the byte stream at fixed CHUNK_SIZE offsets and then
    decoded each slice, which raised UnicodeDecodeError whenever a multi-byte
    character straddled a boundary. Each chunk's encoded size is therefore at
    most CHUNK_SIZE but may be a few bytes smaller.

    Args:
        content: The content to split
        transfer_id: Optional transfer ID, will generate one if not provided

    Returns:
        List of chunk dictionaries ready for transmission (empty for None)
    """
    if content is None:
        return []

    if transfer_id is None:
        transfer_id = str(uuid.uuid4())

    content_bytes = content.encode('utf-8')
    total_size = len(content_bytes)
    content_hash = hashlib.sha256(content_bytes).hexdigest()

    # First pass: cut the byte stream, backing each cut up to the start of a
    # UTF-8 character (continuation bytes look like 0b10xxxxxx).
    pieces: List[bytes] = []
    offset = 0
    while offset < total_size:
        end = min(offset + CHUNK_SIZE, total_size)
        while end < total_size and (content_bytes[end] & 0xC0) == 0x80:
            end -= 1
        pieces.append(content_bytes[offset:end])
        offset = end

    # Second pass: build the wire-format dicts. chunk_count is the actual
    # number of pieces — boundary alignment can make it differ from a plain
    # ceiling division of total_size by CHUNK_SIZE.
    chunk_count = len(pieces)
    chunks = []
    for chunk_index, chunk_data in enumerate(pieces):
        chunks.append({
            "transfer_id": transfer_id,
            "chunk_index": chunk_index,
            "chunk_count": chunk_count,
            "chunk_size": len(chunk_data),
            "total_size": total_size,
            "content_hash": content_hash,
            "chunk_hash": hashlib.sha256(chunk_data).hexdigest(),
            "chunk_content": chunk_data.decode('utf-8'),
            "is_final_chunk": chunk_index == chunk_count - 1,
        })

    logger.info(f"Split content into {len(chunks)} chunks (total size: {total_size} bytes, transfer_id: {transfer_id})")
    return chunks
87
+
88
+
89
def create_chunked_response(base_response: Dict[str, Any], content_field: str, content: str) -> List[Dict[str, Any]]:
    """
    Create chunked response messages from a base response and content.

    Args:
        base_response: The base response dictionary
        content_field: The field name where content should be placed
        content: The content to chunk

    Returns:
        List of response dictionaries; a single unchunked message when the
        content is small enough, otherwise one message per chunk.
    """
    if not should_chunk_content(content):
        # Small payload: a single message carries the whole content.
        single = base_response.copy()
        single[content_field] = content
        single["chunked"] = False
        return [single]

    # Large payload: fan the content out across chunk messages that each
    # carry the transfer metadata needed for reassembly and verification.
    transfer_id = str(uuid.uuid4())
    responses = []
    for chunk in split_content_into_chunks(content, transfer_id):
        message = base_response.copy()
        message["chunked"] = True
        for field in ("transfer_id", "chunk_index", "chunk_count", "chunk_size",
                      "total_size", "content_hash", "chunk_hash", "is_final_chunk"):
            message[field] = chunk[field]
        message[content_field] = chunk["chunk_content"]
        responses.append(message)

    logger.info(f"Created chunked response with {len(responses)} chunks for transfer_id: {transfer_id}")
    return responses
130
+
131
+
132
class ChunkAssembler:
    """
    Helper class to assemble chunked content on the receiving side.

    Chunks may arrive out of order; they are buffered per ``transfer_id``
    until all are present, verified (per-chunk and whole-content SHA-256),
    and then concatenated in index order.
    """

    def __init__(self):
        # transfer_id -> {"chunk_count", "total_size", "content_hash",
        #                 "chunks": {index: content}, "received_chunks",
        #                 "start_time"}
        self.transfers: Dict[str, Dict[str, Any]] = {}

    def add_chunk(self, chunk_data: Dict[str, Any], content_field: str) -> Optional[str]:
        """
        Add a chunk to the assembler.

        Args:
            chunk_data: The chunk data dictionary
            content_field: The field name containing the chunk content

        Returns:
            Complete content if all chunks received, None if more chunks needed

        Raises:
            ValueError: If chunk data is invalid or verification fails
        """
        import time  # local import, consistent with cleanup_stale_transfers

        transfer_id = chunk_data.get("transfer_id")
        chunk_index = chunk_data.get("chunk_index")
        chunk_count = chunk_data.get("chunk_count")
        chunk_size = chunk_data.get("chunk_size")
        total_size = chunk_data.get("total_size")
        content_hash = chunk_data.get("content_hash")
        chunk_hash = chunk_data.get("chunk_hash")
        chunk_content = chunk_data.get(content_field)

        if not all([transfer_id, chunk_index is not None, chunk_count, chunk_size,
                    total_size, content_hash, chunk_hash, chunk_content is not None]):
            raise ValueError("Missing required chunk fields")

        # Verify the chunk's byte length and SHA-256 before buffering it.
        chunk_bytes = chunk_content.encode('utf-8')
        if len(chunk_bytes) != chunk_size:
            raise ValueError(f"Chunk size mismatch: expected {chunk_size}, got {len(chunk_bytes)}")

        calculated_chunk_hash = hashlib.sha256(chunk_bytes).hexdigest()
        if calculated_chunk_hash != chunk_hash:
            raise ValueError(f"Chunk hash mismatch: expected {chunk_hash}, got {calculated_chunk_hash}")

        # First chunk for this transfer: record the expected metadata.
        # start_time is stamped at creation so cleanup_stale_transfers can
        # expire the transfer on its first pass (previously it was stamped
        # during cleanup, delaying expiry by one whole cleanup cycle).
        if transfer_id not in self.transfers:
            self.transfers[transfer_id] = {
                "chunk_count": chunk_count,
                "total_size": total_size,
                "content_hash": content_hash,
                "chunks": {},
                "received_chunks": 0,
                "start_time": time.time(),
            }

        transfer = self.transfers[transfer_id]

        # Every chunk of a transfer must agree on the transfer-level metadata.
        if (transfer["chunk_count"] != chunk_count or
                transfer["total_size"] != total_size or
                transfer["content_hash"] != content_hash):
            raise ValueError("Transfer metadata mismatch")

        # Store the chunk; duplicates are silently ignored.
        if chunk_index not in transfer["chunks"]:
            transfer["chunks"][chunk_index] = chunk_content
            transfer["received_chunks"] += 1

        logger.debug(f"Received chunk {chunk_index + 1}/{chunk_count} for transfer {transfer_id}")

        if transfer["received_chunks"] == chunk_count:
            # All chunks present: assemble in index order. join() avoids the
            # quadratic cost of repeated string concatenation.
            parts = []
            for i in range(chunk_count):
                if i not in transfer["chunks"]:
                    raise ValueError(f"Missing chunk {i} for transfer {transfer_id}")
                parts.append(transfer["chunks"][i])
            assembled_content = "".join(parts)

            # Verify the reassembled content against the advertised size/hash.
            assembled_bytes = assembled_content.encode('utf-8')
            if len(assembled_bytes) != total_size:
                raise ValueError(f"Final content size mismatch: expected {total_size}, got {len(assembled_bytes)}")

            calculated_hash = hashlib.sha256(assembled_bytes).hexdigest()
            if calculated_hash != content_hash:
                raise ValueError(f"Final content hash mismatch: expected {content_hash}, got {calculated_hash}")

            # Clean up the completed transfer before returning.
            del self.transfers[transfer_id]

            logger.info(f"Successfully assembled content from {chunk_count} chunks (transfer_id: {transfer_id}, size: {total_size} bytes)")
            return assembled_content

        return None  # More chunks needed

    def cleanup_stale_transfers(self, max_age_seconds: int = 300):
        """Clean up transfers older than max_age_seconds."""
        import time
        current_time = time.time()

        stale_transfers = []
        for transfer_id, transfer in self.transfers.items():
            # Fallback for entries created before start_time was stamped at
            # creation time; they get stamped now and expire on a later pass.
            if "start_time" not in transfer:
                transfer["start_time"] = current_time

            if current_time - transfer["start_time"] > max_age_seconds:
                stale_transfers.append(transfer_id)

        for transfer_id in stale_transfers:
            logger.warning(f"Cleaning up stale transfer: {transfer_id}")
            del self.transfers[transfer_id]