portacode 1.3.27.dev0.tar.gz → 1.3.28.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of portacode might be problematic.

Files changed (93)
  1. {portacode-1.3.27.dev0 → portacode-1.3.28}/PKG-INFO +1 -1
  2. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/_version.py +2 -2
  3. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/client.py +19 -0
  4. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/base.py +57 -7
  5. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/git_manager.py +236 -1
  6. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/manager.py +98 -33
  7. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/registry.py +15 -4
  8. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/terminal.py +3 -2
  9. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/static/js/utils/ntp-clock.js +5 -3
  10. portacode-1.3.28/portacode/utils/NTP_ARCHITECTURE.md +136 -0
  11. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/PKG-INFO +1 -1
  12. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/SOURCES.txt +3 -1
  13. {portacode-1.3.27.dev0 → portacode-1.3.28}/test.sh +0 -0
  14. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/playwright_manager.py +7 -1
  15. portacode-1.3.28/tools/test_python_ntp_clock.py +39 -0
  16. {portacode-1.3.27.dev0 → portacode-1.3.28}/.claude/agents/communication-manager.md +0 -0
  17. {portacode-1.3.27.dev0 → portacode-1.3.28}/.claude/settings.local.json +0 -0
  18. {portacode-1.3.27.dev0 → portacode-1.3.28}/.gitignore +0 -0
  19. {portacode-1.3.27.dev0 → portacode-1.3.28}/.gitmodules +0 -0
  20. {portacode-1.3.27.dev0 → portacode-1.3.28}/LICENSE +0 -0
  21. {portacode-1.3.27.dev0 → portacode-1.3.28}/MANIFEST.in +0 -0
  22. {portacode-1.3.27.dev0 → portacode-1.3.28}/Makefile +0 -0
  23. {portacode-1.3.27.dev0 → portacode-1.3.28}/README.md +0 -0
  24. {portacode-1.3.27.dev0 → portacode-1.3.28}/backup.sh +0 -0
  25. {portacode-1.3.27.dev0 → portacode-1.3.28}/connect.py +0 -0
  26. {portacode-1.3.27.dev0 → portacode-1.3.28}/connect.sh +0 -0
  27. {portacode-1.3.27.dev0 → portacode-1.3.28}/docker-compose.yaml +0 -0
  28. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/README.md +0 -0
  29. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/__init__.py +0 -0
  30. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/__main__.py +0 -0
  31. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/cli.py +0 -0
  32. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/README.md +0 -0
  33. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/__init__.py +0 -0
  34. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/README.md +0 -0
  35. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/WEBSOCKET_PROTOCOL.md +0 -0
  36. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/__init__.py +0 -0
  37. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/chunked_content.py +0 -0
  38. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/file_handlers.py +0 -0
  39. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_aware_file_handlers.py +0 -0
  40. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/README.md +0 -0
  41. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/__init__.py +0 -0
  42. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/file_system_watcher.py +0 -0
  43. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/handlers.py +0 -0
  44. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/models.py +0 -0
  45. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state/utils.py +0 -0
  46. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/project_state_handlers.py +0 -0
  47. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/session.py +0 -0
  48. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/system_handlers.py +0 -0
  49. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/tab_factory.py +0 -0
  50. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/handlers/terminal_handlers.py +0 -0
  51. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/connection/multiplex.py +0 -0
  52. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/data.py +0 -0
  53. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/keypair.py +0 -0
  54. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/logging_categories.py +0 -0
  55. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/service.py +0 -0
  56. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/static/js/test-ntp-clock.html +0 -0
  57. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/utils/__init__.py +0 -0
  58. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode/utils/ntp_clock.py +0 -0
  59. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/dependency_links.txt +0 -0
  60. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/entry_points.txt +0 -0
  61. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/requires.txt +0 -0
  62. {portacode-1.3.27.dev0 → portacode-1.3.28}/portacode.egg-info/top_level.txt +0 -0
  63. {portacode-1.3.27.dev0 → portacode-1.3.28}/pyproject.toml +0 -0
  64. {portacode-1.3.27.dev0 → portacode-1.3.28}/restore.sh +0 -0
  65. {portacode-1.3.27.dev0 → portacode-1.3.28}/run_tests.py +0 -0
  66. {portacode-1.3.27.dev0 → portacode-1.3.28}/setup.cfg +0 -0
  67. {portacode-1.3.27.dev0 → portacode-1.3.28}/setup.py +0 -0
  68. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/README.md +0 -0
  69. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/__init__.py +0 -0
  70. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_device_online.py +0 -0
  71. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_file_operations.py +0 -0
  72. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_git_status_ui.py +0 -0
  73. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_login_flow.py +0 -0
  74. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_navigate_testing_folder.py +0 -0
  75. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_terminal_buffer_performance.py +0 -0
  76. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_terminal_interaction.py +0 -0
  77. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_terminal_loading_race_condition.py +0 -0
  78. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_modules/test_terminal_start.py +0 -0
  79. {portacode-1.3.27.dev0 → portacode-1.3.28}/test_request_id.py +0 -0
  80. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/.env.example +0 -0
  81. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/README.md +0 -0
  82. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/__init__.py +0 -0
  83. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/cli.py +0 -0
  84. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/__init__.py +0 -0
  85. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/base_test.py +0 -0
  86. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/cli_manager.py +0 -0
  87. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/hierarchical_runner.py +0 -0
  88. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/runner.py +0 -0
  89. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/shared_cli_manager.py +0 -0
  90. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/core/test_discovery.py +0 -0
  91. {portacode-1.3.27.dev0 → portacode-1.3.28}/testing_framework/requirements.txt +0 -0
  92. {portacode-1.3.27.dev0 → portacode-1.3.28}/todo/issues/indefinite_resource_loading.md +0 -0
  93. {portacode-1.3.27.dev0 → portacode-1.3.28}/todo/issues/premature_terminal_exit.md +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: portacode
- Version: 1.3.27.dev0
+ Version: 1.3.28
  Summary: Portacode CLI client and SDK
  Home-page: https://github.com/portacode/portacode
  Author: Meena Erian
portacode/_version.py
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '1.3.27.dev0'
- __version_tuple__ = version_tuple = (1, 3, 27, 'dev0')
+ __version__ = version = '1.3.28'
+ __version_tuple__ = version_tuple = (1, 3, 28)

  __commit_id__ = commit_id = None
portacode/connection/client.py
@@ -148,6 +148,25 @@ class ConnectionManager:
  while not self._stop_event.is_set():
  try:
  message = await asyncio.wait_for(self.websocket.recv(), timeout=1.0)
+
+ # Add device_receive timestamp if trace present
+ try:
+ import json
+ data = json.loads(message)
+ payload = data.get("payload", {})
+ if isinstance(payload, dict) and "trace" in payload and "request_id" in payload:
+ from portacode.utils.ntp_clock import ntp_clock
+ device_receive_time = ntp_clock.now_ms()
+ if device_receive_time is not None:
+ payload["trace"]["device_receive"] = device_receive_time
+ if "client_send" in payload["trace"]:
+ payload["trace"]["ping"] = device_receive_time - payload["trace"]["client_send"]
+ # Re-serialize with updated trace
+ message = json.dumps(data)
+ logger.info(f"📥 Device received traced message: {payload['request_id']}")
+ except:
+ pass # Not a traced message, continue normally
+
  if self.mux:
  await self.mux.on_raw_message(message)
  except asyncio.TimeoutError:
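For readers skimming the diff, the pattern introduced here can be reduced to a small standalone sketch: parse the incoming message, and if it carries both a `trace` dict and a `request_id`, stamp the current NTP time for this hop and recompute the running `ping` relative to `client_send` before re-serializing. This is illustrative only; the `stamp_trace` helper and the example payload below are assumptions, not code from the package.

```python
import json
import time

def stamp_trace(raw_message: str, hop_name: str, now_ms) -> str:
    """Sketch: add a hop timestamp to a traced message and update its running ping."""
    try:
        data = json.loads(raw_message)
        payload = data.get("payload", {})
        if isinstance(payload, dict) and "trace" in payload and "request_id" in payload:
            ts = now_ms()
            if ts is not None:  # clock may report None before its first sync
                payload["trace"][hop_name] = ts
                if "client_send" in payload["trace"]:
                    payload["trace"]["ping"] = ts - payload["trace"]["client_send"]
            return json.dumps(data)
    except (ValueError, TypeError):
        pass  # not a traced JSON message; leave it untouched
    return raw_message

# Hypothetical traced payload stamped at the "device_receive" hop
example = json.dumps({"payload": {"request_id": "abc", "trace": {"client_send": 1000}}})
print(stamp_trace(example, "device_receive", lambda: int(time.time() * 1000)))
```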
portacode/connection/handlers/base.py
@@ -4,6 +4,7 @@ import asyncio
  import logging
  from abc import ABC, abstractmethod
  from typing import Any, Dict, Optional, TYPE_CHECKING
+ from portacode.utils.ntp_clock import ntp_clock

  if TYPE_CHECKING:
  from ..multiplex import Channel
@@ -42,35 +43,45 @@ class BaseHandler(ABC):

  async def send_response(self, payload: Dict[str, Any], reply_channel: Optional[str] = None, project_id: str = None) -> None:
  """Send a response back to the gateway with client session awareness.
-
+
  Args:
  payload: Response payload
  reply_channel: Optional reply channel for backward compatibility
  project_id: Optional project filter for targeting specific sessions
  """
+ # Add device_send timestamp if trace present
+ if "trace" in payload and "request_id" in payload:
+ device_send_time = ntp_clock.now_ms()
+ if device_send_time is not None:
+ payload["trace"]["device_send"] = device_send_time
+ # Update ping to show total time from client_send
+ if "client_send" in payload["trace"]:
+ payload["trace"]["ping"] = device_send_time - payload["trace"]["client_send"]
+ logger.info(f"📤 Device sending traced response: {payload['request_id']}")
+
  # Get client session manager from context
  client_session_manager = self.context.get("client_session_manager")
-
+
  if client_session_manager and client_session_manager.has_interested_clients():
  # Get target sessions
  target_sessions = client_session_manager.get_target_sessions(project_id)
  if not target_sessions:
  logger.debug("handler: No target sessions found, skipping response send")
  return
-
+
  # Add session targeting information
  enhanced_payload = dict(payload)
  enhanced_payload["client_sessions"] = target_sessions
-
+
  # Add backward compatibility reply_channel (first session if not provided)
  if not reply_channel:
  reply_channel = client_session_manager.get_reply_channel_for_compatibility()
  if reply_channel:
  enhanced_payload["reply_channel"] = reply_channel
-
- logger.debug("handler: Sending response to %d client sessions: %s",
+
+ logger.debug("handler: Sending response to %d client sessions: %s",
  len(target_sessions), target_sessions)
-
+
  await self.control_channel.send(enhanced_payload)
  else:
  # Fallback to original behavior if no client session manager or no clients
@@ -110,6 +121,16 @@ class AsyncHandler(BaseHandler):
  logger.info("handler: Processing command %s with reply_channel=%s",
  self.command_name, reply_channel)

+ # Add handler_dispatch timestamp if trace present
+ if "trace" in message and "request_id" in message:
+ handler_dispatch_time = ntp_clock.now_ms()
+ if handler_dispatch_time is not None:
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
+ # Update ping to show total time from client_send
+ if "client_send" in message["trace"]:
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
+
  try:
  response = await self.execute(message)
  logger.info("handler: Command %s executed successfully", self.command_name)
@@ -120,6 +141,17 @@ class AsyncHandler(BaseHandler):
  if "request_id" in message and "request_id" not in response:
  response["request_id"] = message["request_id"]

+ # Pass through trace from request to response (add to existing trace, don't create new one)
+ if "trace" in message and "request_id" in message:
+ response["trace"] = dict(message["trace"])
+ handler_complete_time = ntp_clock.now_ms()
+ if handler_complete_time is not None:
+ response["trace"]["handler_complete"] = handler_complete_time
+ # Update ping to show total time from client_send
+ if "client_send" in response["trace"]:
+ response["trace"]["ping"] = handler_complete_time - response["trace"]["client_send"]
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
+
  # Extract project_id from response for session targeting
  project_id = response.get("project_id")
  logger.info("handler: %s response project_id=%s, response=%s",
@@ -151,6 +183,16 @@ class SyncHandler(BaseHandler):

  async def handle(self, message: Dict[str, Any], reply_channel: Optional[str] = None) -> None:
  """Handle the command by executing it in an executor and sending the response."""
+ # Add handler_dispatch timestamp if trace present
+ if "trace" in message and "request_id" in message:
+ handler_dispatch_time = ntp_clock.now_ms()
+ if handler_dispatch_time is not None:
+ message["trace"]["handler_dispatch"] = handler_dispatch_time
+ # Update ping to show total time from client_send
+ if "client_send" in message["trace"]:
+ message["trace"]["ping"] = handler_dispatch_time - message["trace"]["client_send"]
+ logger.info(f"🔧 Handler dispatching: {message['request_id']} ({self.command_name})")
+
  try:
  loop = asyncio.get_running_loop()
  response = await loop.run_in_executor(None, self.execute, message)
@@ -159,6 +201,14 @@ class SyncHandler(BaseHandler):
  if "request_id" in message and "request_id" not in response:
  response["request_id"] = message["request_id"]

+ # Pass through trace from request to response (add to existing trace, don't create new one)
+ if "trace" in message and "request_id" in message:
+ response["trace"] = dict(message["trace"])
+ handler_complete_time = ntp_clock.now_ms()
+ if handler_complete_time is not None:
+ response["trace"]["handler_complete"] = handler_complete_time
+ logger.info(f"✅ Handler completed: {message['request_id']} ({self.command_name})")
+
  # Extract project_id from response for session targeting
  project_id = response.get("project_id")
  await self.send_response(response, reply_channel, project_id)
portacode/connection/handlers/project_state/git_manager.py
@@ -291,7 +291,242 @@ class GitManager:
  except Exception as e:
  logger.debug("Error getting Git status for %s: %s", file_path, e)
  return {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
-
+
+ def get_file_status_batch(self, file_paths: List[str]) -> Dict[str, Dict[str, Any]]:
+ """Get Git status for multiple files/directories at once (optimized batch operation).
+
+ Args:
+ file_paths: List of absolute file paths
+
+ Returns:
+ Dict mapping file_path to status dict: {"is_tracked": bool, "status": str, "is_ignored": bool, "is_staged": bool|"mixed"}
+ """
+ if not self.is_git_repo or not self.repo:
+ # Return empty status for all paths
+ return {path: {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+ for path in file_paths}
+
+ result = {}
+
+ try:
+ # Convert all paths to relative paths
+ rel_paths_map = {} # abs_path -> rel_path
+ for file_path in file_paths:
+ try:
+ rel_path = os.path.relpath(file_path, self.repo.working_dir)
+ rel_paths_map[file_path] = rel_path
+ except Exception as e:
+ logger.debug("Error converting path %s to relative: %s", file_path, e)
+ result[file_path] = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ rel_paths = list(rel_paths_map.values())
+
+ # BATCH OPERATION 1: Get all ignored paths at once
+ ignored_paths = set()
+ try:
+ ignored_list = self.repo.ignored(*rel_paths)
+ ignored_paths = set(ignored_list) if ignored_list else set()
+ except Exception as e:
+ logger.debug("Error checking ignored status for batch: %s", e)
+
+ # BATCH OPERATION 2: Get global git data once
+ untracked_files = set(self.repo.untracked_files)
+
+ try:
+ staged_files_output = self.repo.git.diff('--cached', '--name-only')
+ staged_files = set(staged_files_output.splitlines()) if staged_files_output.strip() else set()
+ except Exception:
+ staged_files = set()
+
+ try:
+ unstaged_files_output = self.repo.git.diff('--name-only')
+ unstaged_files = set(unstaged_files_output.splitlines()) if unstaged_files_output.strip() else set()
+ except Exception:
+ unstaged_files = set()
+
+ # BATCH OPERATION 3: Get status for all paths at once
+ status_map = {} # rel_path -> status_line
+ try:
+ status_output = self.repo.git.status(*rel_paths, porcelain=True)
+ if status_output.strip():
+ for line in status_output.strip().split('\n'):
+ if len(line) >= 3:
+ file_path_from_status = line[3:] if len(line) > 3 else ""
+ status_map[file_path_from_status] = line
+ except Exception as e:
+ logger.debug("Error getting batch status: %s", e)
+
+ # BATCH OPERATION 4: Get all tracked files
+ try:
+ tracked_files_output = self.repo.git.ls_files()
+ tracked_files = set(tracked_files_output.splitlines()) if tracked_files_output.strip() else set()
+ except Exception:
+ tracked_files = set()
+
+ # Process each file with the batch data
+ for file_path, rel_path in rel_paths_map.items():
+ try:
+ # Check if ignored
+ if rel_path in ignored_paths:
+ result[file_path] = {"is_tracked": False, "status": "ignored", "is_ignored": True, "is_staged": False}
+ continue
+
+ # Determine staging status
+ is_staged = self._get_staging_status_from_batch(
+ file_path, rel_path, staged_files, unstaged_files
+ )
+
+ # Handle directories
+ if os.path.isdir(file_path):
+ result[file_path] = self._get_directory_status_from_batch(
+ file_path, rel_path, untracked_files, status_map, tracked_files, is_staged
+ )
+ # Handle files
+ else:
+ result[file_path] = self._get_file_status_from_batch(
+ file_path, rel_path, untracked_files, staged_files, tracked_files, is_staged
+ )
+
+ except Exception as e:
+ logger.debug("Error processing status for %s: %s", file_path, e)
+ result[file_path] = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ # Fill in any missing paths with default status
+ for file_path in file_paths:
+ if file_path not in result:
+ result[file_path] = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ except Exception as e:
+ logger.error("Error in get_file_status_batch: %s", e)
+ # Return default status for all paths on error
+ for file_path in file_paths:
+ if file_path not in result:
+ result[file_path] = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ return result
+
+ def _get_staging_status_from_batch(self, file_path: str, rel_path: str,
+ staged_files: set, unstaged_files: set) -> Union[bool, str]:
+ """Get staging status using pre-fetched batch data."""
+ try:
+ if os.path.isdir(file_path):
+ # For directories, check files within
+ dir_staged_files = [f for f in staged_files if f.startswith(rel_path + '/') or f == rel_path]
+ dir_unstaged_files = [f for f in unstaged_files if f.startswith(rel_path + '/') or f == rel_path]
+
+ has_staged = len(dir_staged_files) > 0
+ has_unstaged = len(dir_unstaged_files) > 0
+
+ # Check for mixed staging
+ has_mixed_files = any(f in dir_unstaged_files for f in dir_staged_files)
+
+ if has_mixed_files or (has_staged and has_unstaged):
+ return "mixed"
+ elif has_staged:
+ return True
+ else:
+ return False
+ else:
+ # For files
+ has_staged = rel_path in staged_files
+ has_unstaged = rel_path in unstaged_files
+
+ if has_staged and has_unstaged:
+ return "mixed"
+ elif has_staged:
+ return True
+ else:
+ return False
+ except Exception:
+ return False
+
+ def _get_directory_status_from_batch(self, file_path: str, rel_path: str,
+ untracked_files: set, status_map: dict,
+ tracked_files: set, is_staged: Union[bool, str]) -> Dict[str, Any]:
+ """Get directory status using pre-fetched batch data."""
+ try:
+ rel_path_normalized = rel_path.replace('\\', '/')
+
+ # Check for untracked files in this directory
+ has_untracked = any(
+ f.replace('\\', '/').startswith(rel_path_normalized + '/') or f.replace('\\', '/') == rel_path_normalized
+ for f in untracked_files
+ )
+
+ # Check for modified/deleted files using status map
+ has_modified = False
+ has_deleted = False
+
+ for status_file_path, status_line in status_map.items():
+ if len(status_line) >= 2:
+ file_normalized = status_file_path.replace('\\', '/')
+ if file_normalized.startswith(rel_path_normalized + '/') or file_normalized == rel_path_normalized:
+ index_status = status_line[0] if len(status_line) > 0 else ' '
+ worktree_status = status_line[1] if len(status_line) > 1 else ' '
+
+ if index_status in ['M', 'A', 'R', 'C'] or worktree_status in ['M', 'A', 'R', 'C']:
+ has_modified = True
+ elif index_status == 'D' or worktree_status == 'D':
+ has_deleted = True
+
+ # Priority order: untracked > modified/deleted > clean
+ if has_untracked:
+ return {"is_tracked": False, "status": "untracked", "is_ignored": False, "is_staged": is_staged}
+ elif has_deleted:
+ return {"is_tracked": True, "status": "deleted", "is_ignored": False, "is_staged": is_staged}
+ elif has_modified:
+ return {"is_tracked": True, "status": "modified", "is_ignored": False, "is_staged": is_staged}
+
+ # Check if directory has tracked files
+ has_tracked = any(
+ f.replace('\\', '/').startswith(rel_path_normalized + '/') or f.replace('\\', '/') == rel_path_normalized
+ for f in tracked_files
+ )
+
+ status = "clean" if has_tracked else None
+ return {"is_tracked": has_tracked, "status": status, "is_ignored": False, "is_staged": is_staged}
+
+ except Exception as e:
+ logger.debug("Error getting directory status for %s: %s", file_path, e)
+ return {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ def _get_file_status_from_batch(self, file_path: str, rel_path: str,
+ untracked_files: set, staged_files: set,
+ tracked_files: set, is_staged: Union[bool, str]) -> Dict[str, Any]:
+ """Get file status using pre-fetched batch data."""
+ try:
+ # Check if untracked
+ if rel_path in untracked_files:
+ return {"is_tracked": False, "status": "untracked", "is_ignored": False, "is_staged": is_staged}
+
+ # If file is staged, determine original status
+ if is_staged:
+ # Check if file existed in HEAD
+ try:
+ self.repo.git.show(f"HEAD:{rel_path}")
+ # File existed in HEAD
+ return {"is_tracked": True, "status": "modified", "is_ignored": False, "is_staged": is_staged}
+ except Exception:
+ # File didn't exist in HEAD (new file)
+ return {"is_tracked": False, "status": "added", "is_ignored": False, "is_staged": is_staged}
+
+ # Check if tracked and dirty
+ try:
+ if self.repo.is_dirty(path=rel_path):
+ return {"is_tracked": True, "status": "modified", "is_ignored": False, "is_staged": is_staged}
+ except Exception:
+ pass
+
+ # Check if tracked and clean
+ if rel_path in tracked_files:
+ return {"is_tracked": True, "status": "clean", "is_ignored": False, "is_staged": is_staged}
+
+ return {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
+ except Exception as e:
+ logger.debug("Error getting file status for %s: %s", file_path, e)
+ return {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
+
  def get_status_summary(self) -> Dict[str, int]:
  """Get summary of Git status."""
  if not self.is_git_repo or not self.repo:
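A rough usage sketch for the new batch API: gather the immediate children of a folder in one `os.scandir` pass, then make a single `get_file_status_batch` call instead of one `get_file_status` call per entry. Only `get_file_status_batch` comes from the hunk above; the helper name and the commented call site are hypothetical.

```python
import os
from typing import List

def collect_child_paths(folder: str) -> List[str]:
    """Gather immediate children of a folder, skipping .git (mirrors the scan in manager.py)."""
    paths = []
    with os.scandir(folder) as entries:
        for entry in entries:
            if entry.name == '.git' and entry.is_dir():
                continue
            paths.append(entry.path)
    return paths

# Hypothetical call site, assuming git_manager is a GitManager rooted at the project folder:
# child_paths = collect_child_paths("/path/to/project")
# status_by_path = git_manager.get_file_status_batch(child_paths)
# for path, info in status_by_path.items():
#     print(path, info["status"], info["is_staged"])
```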
portacode/connection/handlers/project_state/manager.py
@@ -156,16 +156,23 @@ class ProjectStateManager:

  git_manager = GitManager(project_folder_path, change_callback=git_change_callback)
  self.git_managers[client_session_id] = git_manager
-
+
+ # Run git operations in executor to avoid blocking event loop
+ loop = asyncio.get_event_loop()
+ is_git_repo = git_manager.is_git_repo
+ git_branch = await loop.run_in_executor(None, git_manager.get_branch_name)
+ git_status_summary = await loop.run_in_executor(None, git_manager.get_status_summary)
+ git_detailed_status = await loop.run_in_executor(None, git_manager.get_detailed_status)
+
  # Create project state
  project_state = ProjectState(
  client_session_id=client_session_id,
  project_folder_path=project_folder_path,
  items=[],
- is_git_repo=git_manager.is_git_repo,
- git_branch=git_manager.get_branch_name(),
- git_status_summary=git_manager.get_status_summary(),
- git_detailed_status=git_manager.get_detailed_status()
+ is_git_repo=is_git_repo,
+ git_branch=git_branch,
+ git_status_summary=git_status_summary,
+ git_detailed_status=git_detailed_status
  )

  # Initialize monitored folders with project root and its immediate subdirectories
@@ -352,16 +359,60 @@ class ProjectStateManager:

  async def _build_flattened_items_structure(self, project_state: ProjectState):
  """Build a flattened items structure including ALL items from ALL monitored folders."""
+ import time
+ func_start = time.time()
+
  all_items = []
-
+
  # Create sets for quick lookup
  expanded_paths = {mf.folder_path for mf in project_state.monitored_folders if mf.is_expanded}
  monitored_paths = {mf.folder_path for mf in project_state.monitored_folders}
-
- # Load items from ALL monitored folders
+
+ # OPTIMIZATION: Collect all file paths first, then batch git operations
+ batch_git_start = time.time()
+ all_file_paths = []
+ folder_to_paths = {} # monitored_folder_path -> list of child paths
+
+ # First pass: scan all directories to collect file paths
+ for monitored_folder in project_state.monitored_folders:
+ try:
+ child_paths = []
+ with os.scandir(monitored_folder.folder_path) as entries:
+ for entry in entries:
+ if entry.name != '.git' or not entry.is_dir():
+ child_paths.append(entry.path)
+ all_file_paths.append(entry.path)
+ folder_to_paths[monitored_folder.folder_path] = child_paths
+ except (OSError, PermissionError) as e:
+ logger.error("Error scanning folder %s: %s", monitored_folder.folder_path, e)
+ folder_to_paths[monitored_folder.folder_path] = []
+
+ # BATCH GIT OPERATION: Get status for ALL files at once
+ git_manager = self.git_managers.get(project_state.client_session_id)
+ git_status_map = {}
+ if git_manager and all_file_paths:
+ loop = asyncio.get_event_loop()
+ git_status_map = await loop.run_in_executor(
+ None,
+ git_manager.get_file_status_batch,
+ all_file_paths
+ )
+
+ batch_git_duration = time.time() - batch_git_start
+ logger.info("⏱️ Batch git operations for %d files took %.4f seconds", len(all_file_paths), batch_git_duration)
+
+ # Second pass: load items using pre-fetched git status
+ load_items_start = time.time()
+ loop = asyncio.get_event_loop()
  for monitored_folder in project_state.monitored_folders:
- # Load direct children of this monitored folder
- children = await self._load_directory_items_list(monitored_folder.folder_path, monitored_folder.folder_path)
+ # Load direct children of this monitored folder (run in executor to avoid blocking)
+ children = await loop.run_in_executor(
+ None,
+ self._load_directory_items_list_sync,
+ monitored_folder.folder_path,
+ monitored_folder.folder_path,
+ git_status_map # Pass pre-fetched git status
+ )

  # Set correct expansion and loading states for each child
  for child in children:
@@ -374,27 +425,41 @@ class ProjectStateManager:
  # Files are always loaded
  child.is_loaded = True
  all_items.append(child)
-
+
+ load_items_duration = time.time() - load_items_start
+ logger.info("⏱️ Loading items took %.4f seconds", load_items_duration)
+
  # Remove duplicates (items might be loaded multiple times due to nested monitoring)
- # Use a dict to deduplicate by path while preserving the last loaded state
+ dedup_start = time.time()
  items_dict = {}
  for item in all_items:
  items_dict[item.path] = item
-
+
+ dedup_duration = time.time() - dedup_start
+ logger.info("⏱️ Deduplication took %.4f seconds", dedup_duration)
+
  # Convert back to list and sort for consistent ordering
+ sort_start = time.time()
  project_state.items = list(items_dict.values())
  project_state.items.sort(key=lambda x: (x.parent_path, not x.is_directory, x.name.lower()))
+ sort_duration = time.time() - sort_start
+ logger.info("⏱️ Sorting took %.4f seconds", sort_duration)
+
+ func_duration = time.time() - func_start
+ logger.info("⏱️ _build_flattened_items_structure TOTAL: %.4f seconds (batch_git=%.4f, load=%.4f)",
+ func_duration, batch_git_duration, load_items_duration)

- async def _load_directory_items_list(self, directory_path: str, parent_path: str) -> List[FileItem]:
- """Load directory items and return as a list with parent_path."""
- git_manager = None
- for manager in self.git_managers.values():
- if directory_path.startswith(manager.project_path):
- git_manager = manager
- break
-
+ def _load_directory_items_list_sync(self, directory_path: str, parent_path: str,
+ git_status_map: Dict[str, Dict[str, Any]] = None) -> List[FileItem]:
+ """Load directory items and return as a list with parent_path (synchronous version for executor).
+
+ Args:
+ directory_path: Directory to scan
+ parent_path: Parent path for items
+ git_status_map: Optional pre-fetched git status map (path -> status_dict)
+ """
  items = []
-
+
  try:
  with os.scandir(directory_path) as entries:
  for entry in entries:
@@ -402,15 +467,15 @@ class ProjectStateManager:
  # Skip .git folders and their contents
  if entry.name == '.git' and entry.is_dir():
  continue
-
+
  stat_info = entry.stat()
  is_hidden = entry.name.startswith('.')
-
- # Get Git status if available
+
+ # Get Git status from pre-fetched map or use default
  git_info = {"is_tracked": False, "status": None, "is_ignored": False, "is_staged": False}
- if git_manager:
- git_info = git_manager.get_file_status(entry.path)
-
+ if git_status_map and entry.path in git_status_map:
+ git_info = git_status_map[entry.path]
+
  file_item = FileItem(
  name=entry.name,
  path=entry.path,
@@ -426,19 +491,19 @@ class ProjectStateManager:
  is_expanded=False,
  is_loaded=True # Will be set correctly in _build_flattened_items_structure
  )
-
+
  items.append(file_item)
-
+
  except (OSError, PermissionError) as e:
  logger.debug("Error reading entry %s: %s", entry.path, e)
  continue
-
+
  # Sort items: directories first, then files, both alphabetically
  items.sort(key=lambda x: (not x.is_directory, x.name.lower()))
-
+
  except (OSError, PermissionError) as e:
  logger.error("Error loading directory %s: %s", directory_path, e)
-
+
  return items

  async def expand_folder(self, client_session_id: str, folder_path: str) -> bool:
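The manager changes above lean on a standard asyncio pattern: blocking filesystem and Git work is pushed into the default thread-pool executor so the event loop stays free to service other coroutines. A minimal, self-contained sketch of that pattern, with `slow_scan` standing in for the real directory and Git work:

```python
import asyncio
import os

def slow_scan(folder: str) -> list:
    """Blocking work: runs in a worker thread, not on the event loop."""
    with os.scandir(folder) as entries:
        return [entry.name for entry in entries]

async def main() -> None:
    loop = asyncio.get_event_loop()  # mirrors the diff; get_running_loop() also works here
    # Offload the blocking call; other coroutines keep running while it executes
    names = await loop.run_in_executor(None, slow_scan, ".")
    print(len(names), "entries")

asyncio.run(main())
```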
portacode/connection/handlers/registry.py
@@ -2,6 +2,7 @@

  import logging
  from typing import Dict, Type, Any, Optional, List, TYPE_CHECKING
+ from portacode.utils.ntp_clock import ntp_clock

  if TYPE_CHECKING:
  from ..multiplex import Channel
@@ -72,22 +73,32 @@ class CommandRegistry:

  async def dispatch(self, command_name: str, message: Dict[str, Any], reply_channel: Optional[str] = None) -> bool:
  """Dispatch a command to its handler.
-
+
  Args:
  command_name: The command name
  message: The command message
  reply_channel: Optional reply channel
-
+
  Returns:
  True if handler was found and executed, False otherwise
  """
  logger.info("registry: Dispatching command '%s' with reply_channel=%s", command_name, reply_channel)
-
+
+ # Add handler_receive timestamp if trace present
+ if "trace" in message and "request_id" in message:
+ handler_receive_time = ntp_clock.now_ms()
+ if handler_receive_time is not None:
+ message["trace"]["handler_receive"] = handler_receive_time
+ # Update ping to show total time from client_send
+ if "client_send" in message["trace"]:
+ message["trace"]["ping"] = handler_receive_time - message["trace"]["client_send"]
+ logger.info(f"🎯 Handler received traced message: {message['request_id']}")
+
  handler = self.get_handler(command_name)
  if handler is None:
  logger.warning("registry: No handler found for command: %s", command_name)
  return False
-
+
  try:
  await handler.handle(message, reply_channel)
  logger.info("registry: Successfully dispatched command '%s'", command_name)
portacode/connection/terminal.py
@@ -497,9 +497,10 @@ class TerminalManager:
  logger.info("terminal_manager: ✅ Updated client sessions (%d sessions)", len(sessions))

  # Auto-send initial data only to newly added clients
+ # Create a background task so it doesn't block the control loop
  if newly_added_sessions:
- logger.info("terminal_manager: 🚀 Triggering auto-send of initial data to newly added clients")
- await self._send_initial_data_to_clients(newly_added_sessions)
+ logger.info("terminal_manager: 🚀 Triggering auto-send of initial data to newly added clients (non-blocking)")
+ asyncio.create_task(self._send_initial_data_to_clients(newly_added_sessions))
  else:
  logger.info("terminal_manager: ℹ️ No new sessions to send data to")
  continue
portacode/static/js/utils/ntp-clock.js
@@ -42,6 +42,8 @@ class NTPClock {
  this._syncAttempts++;

  try {
+ // Capture local time BEFORE the fetch to avoid timing drift
+ const localTimeBeforeFetch = Date.now();
  const t0 = performance.now();
  const response = await fetch('https://cloudflare.com/cdn-cgi/trace');
  const t1 = performance.now();
@@ -49,11 +51,11 @@
  const text = await response.text();
  const serverTime = this._parseCloudflareTime(text);

- const localTime = Date.now();
  const latency = (t1 - t0) / 2; // Estimate one-way latency

- // Calculate offset accounting for latency
- this.offset = serverTime - localTime + latency;
+ // Calculate offset: server generated timestamp at local time (localTimeBeforeFetch + latency)
+ // So offset = serverTime - (localTimeBeforeFetch + latency)
+ this.offset = serverTime - (localTimeBeforeFetch + latency);
  this.lastSync = Date.now();

  console.log(
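The JavaScript change above fixes the offset math: the local reference time is now captured before the request, half the measured round trip is taken as the one-way latency, and the server timestamp is compared against the local clock at the estimated moment the server produced it. A small worked example with made-up numbers (shown in Python purely for the arithmetic):

```python
# Made-up numbers to illustrate the corrected offset formula
local_before_fetch = 1_000_000   # Date.now() captured before the request, in ms
t0, t1 = 0.0, 120.0              # performance.now() readings around the request, in ms
server_time = 1_000_075          # timestamp parsed from the Cloudflare response, in ms

latency = (t1 - t0) / 2          # ~60 ms estimated one-way latency
# The server produced its timestamp at roughly local time (local_before_fetch + latency)
offset = server_time - (local_before_fetch + latency)
print(offset)                    # 15.0 -> the local clock is about 15 ms behind the server
```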
portacode/utils/NTP_ARCHITECTURE.md (new file)
@@ -0,0 +1,136 @@
+ # NTP Clock Architecture
+
+ ## Overview
+
+ All entities (client, server, device) synchronize to **time.cloudflare.com** for distributed tracing.
+
+ ## Architecture: Single Package for Everything
+
+ All NTP clock implementations (Python and JavaScript) are in the **portacode package** to ensure DRY principles.
+
+ ## Python Implementation
+
+ **Location:** `portacode/utils/ntp_clock.py` (in portacode package)
+
+ ### Import Path
+ ```python
+ from portacode.utils.ntp_clock import ntp_clock
+ ```
+
+ ### Usage Locations
+ 1. **Django Server Consumers** (`server/portacode_django/dashboard/consumers.py`)
+ 2. **Device Base Handlers** (`portacode/connection/handlers/base.py`)
+ 3. **Device Client** (`server/portacode_django/data/services/device_client.py`)
+ 4. **Any Python code with portacode installed**
+
+ ### Dependencies
+ - `setup.py`: Added `ntplib>=0.4.0` to `install_requires`
+ - `server/portacode_django/requirements.txt`: Added `portacode>=1.3.26`
+
+ ### API
+ ```python
+ # Get NTP-synchronized timestamp (None if not synced)
+ ntp_clock.now_ms() # milliseconds
+ ntp_clock.now() # seconds
+ ntp_clock.now_iso() # ISO format
+
+ # Check sync status
+ status = ntp_clock.get_status()
+ # {
+ # 'server': 'time.cloudflare.com',
+ # 'offset_ms': 6.04,
+ # 'last_sync': '2025-10-05T04:37:12.768445+00:00',
+ # 'is_synced': True
+ # }
+ ```
+
+ ## JavaScript Implementation
+
+ **Location:** `portacode/static/js/utils/ntp-clock.js` (in portacode package)
+
+ ### Django Setup
+
+ Django will serve static files from the portacode package automatically after `collectstatic`:
+
+ ```python
+ # Django settings.py - no changes needed, just ensure:
+ INSTALLED_APPS = [
+ # ... other apps
+ 'portacode', # Add portacode as an installed app (optional, for admin integration)
+ ]
+
+ # Static files will be collected from portacode package
+ STATIC_URL = '/static/'
+ ```
+
+ After installing portacode (`pip install portacode` or `pip install -e .`), run:
+ ```bash
+ python manage.py collectstatic
+ ```
+
+ This will copy `portacode/static/js/utils/ntp-clock.js` to Django's static files directory.
+
+ ### Import Path (in Django templates/JS)
+ ```javascript
+ import ntpClock from '/static/js/utils/ntp-clock.js';
+ // or relative to your JS file:
+ import ntpClock from './utils/ntp-clock.js';
+ ```
+
+ ### Usage Locations
+ 1. **Dashboard WebSocket** (`websocket-service.js`)
+ 2. **Project WebSocket** (`websocket-service-project.js`)
+
+ ### API
+ ```javascript
+ // Get NTP-synchronized timestamp (null if not synced)
+ ntpClock.now() // milliseconds
+ ntpClock.nowISO() // ISO format
+
+ // Check sync status
+ const status = ntpClock.getStatus();
+ // {
+ // server: 'time.cloudflare.com',
+ // offset: 6.04,
+ // lastSync: '2025-10-05T04:37:12.768445+00:00',
+ // isSynced: true
+ // }
+ ```
+
+ ## Design Principles
+
+ 1. **DRY (Don't Repeat Yourself)**
+ - **Python:** Single implementation in portacode package (`portacode/utils/ntp_clock.py`)
+ - **JavaScript:** Single implementation in portacode package (`portacode/static/js/utils/ntp-clock.js`)
+ - Both served from the same package, no duplication across repos
+
+ 2. **No Fallback Servers**
+ - All entities MUST sync to time.cloudflare.com
+ - If sync fails, timestamps are None/null
+ - Ensures all timestamps are comparable
+
+ 3. **Auto-Sync**
+ - Re-syncs every 5 minutes automatically
+ - Initial sync on import/load
+ - Max 3 retry attempts before marking as failed
+
+ 4. **Thread-Safe (Python)**
+ - Uses threading.Lock for concurrent access
+ - Background daemon thread for periodic sync
+
+ ## Testing
+
+ ### Python
+ ```bash
+ python tools/test_python_ntp_clock.py
+ ```
+
+ ### JavaScript
+ The test file is included in the package at `portacode/static/js/test-ntp-clock.html`.
+
+ After Django collectstatic, open: `/static/js/test-ntp-clock.html` in browser
+
+ Or run directly from package:
+ ```bash
+ python -c "import portacode, os; print(os.path.join(os.path.dirname(portacode.__file__), 'static/js/test-ntp-clock.html'))"
+ ```
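To connect this architecture note with the trace fields added elsewhere in this release, here is an illustrative (not packaged) snippet showing how a caller might build a traced message with `ntp_clock`; the message shape is an assumption based on the hunks above.

```python
from portacode.utils.ntp_clock import ntp_clock

def start_trace(request_id: str) -> dict:
    """Sketch: begin a trace dict with a client_send timestamp (skipped until first sync)."""
    trace = {}
    ts = ntp_clock.now_ms()  # returns None until the clock has synced
    if ts is not None:
        trace["client_send"] = ts
    return {"request_id": request_id, "trace": trace}

print(start_trace("demo-request"))
```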
portacode.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: portacode
- Version: 1.3.27.dev0
+ Version: 1.3.28
  Summary: Portacode CLI client and SDK
  Home-page: https://github.com/portacode/portacode
  Author: Meena Erian
portacode.egg-info/SOURCES.txt
@@ -59,6 +59,7 @@ portacode/connection/handlers/project_state/models.py
  portacode/connection/handlers/project_state/utils.py
  portacode/static/js/test-ntp-clock.html
  portacode/static/js/utils/ntp-clock.js
+ portacode/utils/NTP_ARCHITECTURE.md
  portacode/utils/__init__.py
  portacode/utils/ntp_clock.py
  test_modules/README.md
@@ -86,4 +87,5 @@ testing_framework/core/runner.py
  testing_framework/core/shared_cli_manager.py
  testing_framework/core/test_discovery.py
  todo/issues/indefinite_resource_loading.md
- todo/issues/premature_terminal_exit.md
+ todo/issues/premature_terminal_exit.md
+ tools/test_python_ntp_clock.py
testing_framework/core/playwright_manager.py
@@ -177,8 +177,14 @@ class PlaywrightManager:
  async def _perform_login(self):
  """Perform login using provided credentials."""
  try:
+ # Navigate to login page first
+ login_url = f"{self.base_url}/accounts/login/"
+ await self.page.goto(login_url)
+ await self.log_action("navigate_to_login", {"url": login_url})
+ await self.take_screenshot("login_page")
+
  await self.log_action("login_start", {"username": self.username})
-
+
  # Look for common login form elements
  username_selectors = [
  'input[name="username"]',
tools/test_python_ntp_clock.py (new file)
@@ -0,0 +1,39 @@
+ """Test Python NTP Clock"""
+ import time
+ from portacode.utils.ntp_clock import ntp_clock
+
+ print("=" * 50)
+ print("Testing Python NTP Clock")
+ print("=" * 50)
+
+ # Wait for initial sync
+ print("\nWaiting for initial sync...")
+ time.sleep(2)
+
+ # Print status
+ status = ntp_clock.get_status()
+ print(f"\nSync Status: {'✅ SYNCED' if status['is_synced'] else '❌ NOT SYNCED'}")
+ print(f"Server: {status['server']}")
+ print(f"Offset: {status['offset_ms']}ms" if status['offset_ms'] is not None else "Offset: None")
+ print(f"Last Sync: {status['last_sync']}")
+
+ # Compare timestamps
+ print("\nTimestamp Comparison:")
+ print(f" Local time (ms): {int(time.time() * 1000)}")
+ ntp_time = ntp_clock.now_ms()
+ print(f" NTP time (ms): {ntp_time if ntp_time is not None else 'None (not synced)'}")
+ ntp_iso = ntp_clock.now_iso()
+ print(f" NTP time (ISO): {ntp_iso if ntp_iso is not None else 'None (not synced)'}")
+
+ # Test multiple calls
+ print("\nTesting consistency (10 calls):")
+ for i in range(10):
+ ts = ntp_clock.now_ms()
+ if ts is not None:
+ print(f" {i+1}: {ts}")
+ else:
+ print(f" {i+1}: None (not synced)")
+ time.sleep(0.1)
+
+ print("\n✅ Test complete")
+ print("=" * 50)