django-nativemojo 0.1.10__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. django_nativemojo-0.1.15.dist-info/METADATA +136 -0
  2. {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/RECORD +105 -65
  3. mojo/__init__.py +1 -1
  4. mojo/apps/account/management/__init__.py +5 -0
  5. mojo/apps/account/management/commands/__init__.py +6 -0
  6. mojo/apps/account/management/commands/serializer_admin.py +531 -0
  7. mojo/apps/account/migrations/0004_user_avatar.py +20 -0
  8. mojo/apps/account/migrations/0005_group_last_activity.py +18 -0
  9. mojo/apps/account/models/group.py +25 -7
  10. mojo/apps/account/models/member.py +15 -4
  11. mojo/apps/account/models/user.py +197 -20
  12. mojo/apps/account/rest/group.py +1 -0
  13. mojo/apps/account/rest/user.py +6 -2
  14. mojo/apps/aws/rest/__init__.py +1 -0
  15. mojo/apps/aws/rest/s3.py +64 -0
  16. mojo/apps/fileman/README.md +8 -8
  17. mojo/apps/fileman/backends/base.py +76 -70
  18. mojo/apps/fileman/backends/filesystem.py +86 -86
  19. mojo/apps/fileman/backends/s3.py +200 -108
  20. mojo/apps/fileman/migrations/0001_initial.py +106 -0
  21. mojo/apps/fileman/migrations/0002_filemanager_parent_alter_filemanager_max_file_size.py +24 -0
  22. mojo/apps/fileman/migrations/0003_remove_file_fileman_fil_upload__c4bc35_idx_and_more.py +25 -0
  23. mojo/apps/fileman/migrations/0004_remove_file_original_filename_and_more.py +39 -0
  24. mojo/apps/fileman/migrations/0005_alter_file_upload_token.py +18 -0
  25. mojo/apps/fileman/migrations/0006_file_download_url_filemanager_forever_urls.py +23 -0
  26. mojo/apps/fileman/migrations/0007_remove_filemanager_forever_urls_and_more.py +22 -0
  27. mojo/apps/fileman/migrations/0008_file_category.py +18 -0
  28. mojo/apps/fileman/migrations/0009_rename_file_path_file_storage_file_path.py +18 -0
  29. mojo/apps/fileman/migrations/0010_filerendition.py +33 -0
  30. mojo/apps/fileman/migrations/0011_alter_filerendition_original_file.py +19 -0
  31. mojo/apps/fileman/models/__init__.py +1 -5
  32. mojo/apps/fileman/models/file.py +204 -58
  33. mojo/apps/fileman/models/manager.py +161 -31
  34. mojo/apps/fileman/models/rendition.py +118 -0
  35. mojo/apps/fileman/renderer/__init__.py +111 -0
  36. mojo/apps/fileman/renderer/audio.py +403 -0
  37. mojo/apps/fileman/renderer/base.py +205 -0
  38. mojo/apps/fileman/renderer/document.py +404 -0
  39. mojo/apps/fileman/renderer/image.py +222 -0
  40. mojo/apps/fileman/renderer/utils.py +297 -0
  41. mojo/apps/fileman/renderer/video.py +304 -0
  42. mojo/apps/fileman/rest/__init__.py +1 -18
  43. mojo/apps/fileman/rest/upload.py +22 -32
  44. mojo/apps/fileman/signals.py +58 -0
  45. mojo/apps/fileman/tasks.py +254 -0
  46. mojo/apps/fileman/utils/__init__.py +40 -16
  47. mojo/apps/incident/migrations/0005_incidenthistory.py +39 -0
  48. mojo/apps/incident/migrations/0006_alter_incident_state.py +18 -0
  49. mojo/apps/incident/models/__init__.py +1 -0
  50. mojo/apps/incident/models/history.py +36 -0
  51. mojo/apps/incident/models/incident.py +1 -1
  52. mojo/apps/incident/reporter.py +3 -1
  53. mojo/apps/incident/rest/event.py +7 -1
  54. mojo/apps/logit/migrations/0004_alter_log_level.py +18 -0
  55. mojo/apps/logit/models/log.py +4 -1
  56. mojo/apps/metrics/utils.py +2 -2
  57. mojo/apps/notify/handlers/ses/message.py +1 -1
  58. mojo/apps/notify/providers/aws.py +2 -2
  59. mojo/apps/tasks/__init__.py +34 -1
  60. mojo/apps/tasks/manager.py +200 -45
  61. mojo/apps/tasks/rest/tasks.py +24 -10
  62. mojo/apps/tasks/runner.py +283 -18
  63. mojo/apps/tasks/task.py +99 -0
  64. mojo/apps/tasks/tq_handlers.py +118 -0
  65. mojo/decorators/auth.py +6 -1
  66. mojo/decorators/http.py +7 -2
  67. mojo/helpers/aws/__init__.py +41 -0
  68. mojo/helpers/aws/ec2.py +804 -0
  69. mojo/helpers/aws/iam.py +748 -0
  70. mojo/helpers/aws/s3.py +451 -11
  71. mojo/helpers/aws/ses.py +483 -0
  72. mojo/helpers/aws/sns.py +461 -0
  73. mojo/helpers/crypto/__pycache__/hash.cpython-310.pyc +0 -0
  74. mojo/helpers/crypto/__pycache__/sign.cpython-310.pyc +0 -0
  75. mojo/helpers/crypto/__pycache__/utils.cpython-310.pyc +0 -0
  76. mojo/helpers/dates.py +18 -0
  77. mojo/helpers/response.py +6 -2
  78. mojo/helpers/settings/__init__.py +2 -0
  79. mojo/helpers/{settings.py → settings/helper.py} +1 -37
  80. mojo/helpers/settings/parser.py +132 -0
  81. mojo/middleware/logging.py +1 -1
  82. mojo/middleware/mojo.py +5 -0
  83. mojo/models/rest.py +261 -46
  84. mojo/models/secrets.py +13 -4
  85. mojo/serializers/__init__.py +100 -0
  86. mojo/serializers/advanced/README.md +363 -0
  87. mojo/serializers/advanced/__init__.py +247 -0
  88. mojo/serializers/advanced/formats/__init__.py +28 -0
  89. mojo/serializers/advanced/formats/csv.py +416 -0
  90. mojo/serializers/advanced/formats/excel.py +516 -0
  91. mojo/serializers/advanced/formats/json.py +239 -0
  92. mojo/serializers/advanced/formats/localizers.py +509 -0
  93. mojo/serializers/advanced/formats/response.py +485 -0
  94. mojo/serializers/advanced/serializer.py +568 -0
  95. mojo/serializers/manager.py +501 -0
  96. mojo/serializers/optimized.py +618 -0
  97. mojo/serializers/settings_example.py +322 -0
  98. mojo/serializers/{models.py → simple.py} +38 -15
  99. testit/helpers.py +21 -4
  100. django_nativemojo-0.1.10.dist-info/METADATA +0 -96
  101. mojo/apps/metrics/rest/db.py +0 -0
  102. mojo/helpers/aws/setup_email.py +0 -0
  103. mojo/ws4redis/README.md +0 -174
  104. mojo/ws4redis/__init__.py +0 -2
  105. mojo/ws4redis/client.py +0 -283
  106. mojo/ws4redis/connection.py +0 -327
  107. mojo/ws4redis/exceptions.py +0 -32
  108. mojo/ws4redis/redis.py +0 -183
  109. mojo/ws4redis/servers/base.py +0 -86
  110. mojo/ws4redis/servers/django.py +0 -171
  111. mojo/ws4redis/servers/uwsgi.py +0 -63
  112. mojo/ws4redis/settings.py +0 -45
  113. mojo/ws4redis/utf8validator.py +0 -128
  114. mojo/ws4redis/websocket.py +0 -403
  115. {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/LICENSE +0 -0
  116. {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/NOTICE +0 -0
  117. {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/WHEEL +0 -0
  118. /mojo/{ws4redis/servers → apps/aws}/__init__.py +0 -0
  119. /mojo/apps/{fileman/models/render.py → aws/models/__init__.py} +0 -0
  120. /mojo/apps/fileman/{rest/__init__ → migrations/__init__.py} +0 -0
mojo/apps/tasks/runner.py CHANGED
@@ -1,12 +1,16 @@
 from importlib import import_module
 from concurrent.futures import ThreadPoolExecutor
 from .manager import TaskManager
-from mojo.tasks import manager
+from mojo.apps.tasks import manager
 import os
 from mojo.helpers import logit
 from mojo.helpers import daemon
 from mojo.helpers import paths
+from mojo.apps import metrics
 import time
+import socket
+import threading
+import json


 class TaskEngine(daemon.Daemon):
@@ -23,13 +27,174 @@ class TaskEngine(daemon.Daemon):
             max_workers (int, optional): The maximum number of threads available for task execution. Defaults to 5.
         """
         super().__init__("taskit", os.path.join(paths.VAR_ROOT, "taskit"))
+        self.hostname = socket.gethostname()
         self.manager = manager.TaskManager(channels)
         self.channels = channels
         if "broadcast" not in self.channels:
             self.channels.append("broadcast")
+
+        # Add hostname-specific channel for this runner
+        self.runner_channel = f"runner_{self.hostname}"
+        if self.runner_channel not in self.channels:
+            self.channels.append(self.runner_channel)
+
         self.max_workers = max_workers
         self.executor = None
-        self.logger = logit.get_logger("taskit", "taskit.log")
+        self.logger = logit.get_logger("tasks", "tasks.log")
+        self.ping_thread = None
+        self.ping_interval = 30  # seconds
+        self.started_at = time.time()
+
+    def register_runner(self):
+        """
+        Register this runner as active in the system.
+        """
+        runner_data = {
+            'hostname': self.hostname,
+            'started_at': self.started_at,
+            'max_workers': self.max_workers,
+            'channels': self.channels,
+            'last_ping': time.time(),
+            'status': 'active'
+        }
+        self.manager.redis.hset(
+            self.manager.get_runners_key(),
+            self.hostname,
+            json.dumps(runner_data)
+        )
+        self.logger.info(f"Registered runner {self.hostname}")
+
+    def unregister_runner(self):
+        """
+        Unregister this runner from the active runners list.
+        """
+        self.manager.redis.hdel(self.manager.get_runners_key(), self.hostname)
+        self.logger.info(f"Unregistered runner {self.hostname}")
+
+    def update_runner_status(self, status_data=None):
+        """
+        Update the status of this runner.
+        """
+        if status_data is None:
+            status_data = {}
+
+        runner_data = {
+            'hostname': self.hostname,
+            'last_ping': time.time(),
+            'status': 'active',
+            'started_at': self.started_at,
+            'max_workers': self.max_workers,
+            'channels': self.channels,
+            **status_data
+        }
+        self.manager.redis.hset(
+            self.manager.get_runners_key(),
+            self.hostname,
+            json.dumps(runner_data)
+        )
+
+    def ping_runners(self):
+        """
+        Send ping messages to all active runners to check their status.
+        """
+        active_runners = self.manager.get_active_runners()
+        for hostname in active_runners.keys():
+            if hostname != self.hostname:  # Don't ping ourselves
+                ping_message = {
+                    'type': 'ping',
+                    'from': self.hostname,
+                    'timestamp': time.time()
+                }
+                runner_channel = f"runner_{hostname}"
+                self.manager.redis.publish(
+                    self.manager.get_channel_key(runner_channel),
+                    json.dumps(ping_message)
+                )
+
+    def handle_ping_request(self, message_data):
+        """
+        Handle incoming ping requests and send response.
+        """
+        ping_data = json.loads(message_data)
+        response = {
+            'type': 'ping_response',
+            'from': self.hostname,
+            'to': ping_data['from'],
+            'timestamp': time.time(),
+            'status': self.get_runner_status()
+        }
+
+        # Send response to the requesting runner's channel
+        requester_channel = f"runner_{ping_data['from']}"
+        self.manager.redis.publish(
+            self.manager.get_channel_key(requester_channel),
+            json.dumps(response)
+        )
+
+    def handle_ping_response(self, message_data):
+        """
+        Handle ping responses from other runners.
+        """
+        response_data = json.loads(message_data)
+        self.logger.info(f"Received ping response from {response_data['from']}")
+        # Update the runner's status in our active runners list
+        self.manager.redis.hset(
+            self.manager.get_runners_key(),
+            response_data['from'],
+            json.dumps(response_data['status'])
+        )
+
+    def get_runner_status(self):
+        """
+        Get the current status of this runner.
+
+        Returns:
+            dict: Status information for this runner.
+        """
+        active_threads = 0
+        if self.executor and hasattr(self.executor, '_threads'):
+            active_threads = len([t for t in self.executor._threads if t.is_alive()])
+
+        return {
+            'hostname': self.hostname,
+            'status': 'active',
+            'max_workers': self.max_workers,
+            'active_threads': active_threads,
+            'channels': self.channels,
+            'last_ping': time.time(),
+            'uptime': time.time() - getattr(self, 'start_time', time.time())
+        }
+
+    def start_ping_thread(self):
+        """
+        Start the background thread that periodically pings other runners.
+        """
+        def ping_loop():
+            while self.running:
+                try:
+                    self.ping_runners()
+                    self.update_runner_status()
+                    time.sleep(self.ping_interval)
+                except Exception as e:
+                    self.logger.error(f"Error in ping loop: {e}")
+                    time.sleep(5)
+
+        self.ping_thread = threading.Thread(target=ping_loop, daemon=True)
+        self.ping_thread.start()
+
+    def cleanup_stale_runners(self):
+        """
+        Remove runners that haven't been seen for a while.
+        """
+        cutoff_time = time.time() - (self.ping_interval * 3)  # 3 missed pings
+        active_runners = self.manager.get_active_runners()
+
+        for hostname, runner_data in active_runners.items():
+            last_ping = runner_data.get('last_ping', 0)
+            if last_ping < cutoff_time:
+                self.logger.info(f"Removing stale runner: {hostname}")
+                self.manager.redis.hdel(self.manager.get_runners_key(), hostname)

     def reset_running_tasks(self):
         """
@@ -38,8 +203,8 @@ class TaskEngine(daemon.Daemon):
         for channel in self.channels:
             for task_id in self.manager.get_running_ids(channel):
                 self.logger.info(f"moving task {task_id} from running to pending")
-                self.manager.remove_from_running(channel, task_id)
-                self.manager.add_to_pending(channel, task_id)
+                self.manager.remove_from_running(task_id, channel)
+                self.manager.add_to_pending(task_id, channel)

     def queue_pending_tasks(self):
         """
@@ -56,7 +221,23 @@
         Args:
             message (dict): A dictionary with message data containing task information.
         """
-        self.queue_task(message['data'].decode())
+        message_data = message['data'].decode()
+
+        # Check if this is a ping/status message
+        try:
+            parsed_message = json.loads(message_data)
+            if isinstance(parsed_message, dict) and 'type' in parsed_message:
+                if parsed_message['type'] == 'ping':
+                    self.handle_ping_request(message_data)
+                    return
+                elif parsed_message['type'] == 'ping_response':
+                    self.handle_ping_response(message_data)
+                    return
+        except (json.JSONDecodeError, TypeError):
+            pass
+
+        # If not a ping message, treat as a task
+        self.queue_task(message_data)

     def on_run_task(self, task_id):
         """
@@ -71,7 +252,10 @@
         if not task_data:
             # this task has expired or no longer exists
             self.logger.info(f"Task {task_id} has expired or no longer exists")
-            tman.remove_from_pending(task_id)
+            metrics.record("tasks_expired", category="tasks")
+            # try and remove any pending dead tasks
+            self.manager.channels = self.channels
+            self.manager.take_out_the_dead(local=True)
             return
         self.logger.info(f"Executing task {task_id}")
         function_path = task_data.get('function')
@@ -83,15 +267,28 @@

         try:
             task_data.started_at = time.time()
-            func(task_data)
+            task_data._thread_id = threading.current_thread().ident
+            tdata = task_data.get("data", {})
+            if tdata and "args" in tdata and "kwargs" in tdata:
+                args = tdata["args"]
+                kwargs = tdata["kwargs"]
+                # self.logger.info(f"Executing task {task_id} with args {args} and kwargs {kwargs}")
+                func(*args, **kwargs)
+            else:
+                # self.logger.info(f"Executing task {task_id} with no arguments")
+                func(task_data)
             task_data.completed_at = time.time()
             task_data.elapsed_time = task_data.completed_at - task_data.started_at
+            if "_thread_id" in task_data:
+                del task_data["_thread_id"]
             tman.save_task(task_data)
             tman.add_to_completed(task_data)
+            metrics.record("tasks_completed", category="tasks")
             self.logger.info(f"Task {task_id} completed after {task_data.elapsed_time} seconds")
         except Exception as e:
-            self.logger.error(f"Error executing task {task_id}: {str(e)}")
+            self.logger.exception(f"Error executing task {task_id}: {str(e)}")
             tman.add_to_errors(task_data, str(e))
+            metrics.record("tasks_errors", category="tasks")
         finally:
             tman.remove_from_running(task_id, task_data.channel)

@@ -105,31 +302,68 @@
         self.logger.info(f"adding task {task_id}")
         self.executor.submit(self.on_run_task, task_id)

-    def wait_for_all_tasks_to_complete(self, timeout=30):
+
+    def _clear_queued_tasks(self):
+        import queue
+        q = self.executor._work_queue
+        removed = 0
+        try:
+            while True:
+                q.get_nowait()
+                removed += 1
+        except queue.Empty:
+            pass
+        return removed
+
+    def _wait_for_active_tasks(self, timeout=5.0):
         """
-        Wait for all tasks submitted to the executor to complete.
+        Waits up to `timeout` seconds for active executor threads to finish.
+        Returns True if all threads completed, False if timeout hit.
         """
-        self.executor.shutdown(wait=True, timeout=timeout)
-        # Check if there are still active threads
-        active_threads = [thread for thread in self.executor._threads if thread.is_alive()]
-        if active_threads:
-            self.logger.warning(f"shutdown issue, {len(active_threads)} tasks exceeded timeout")
-            self.executor.shutdown(wait=False) # Stop accepting new tasks
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            active = self.manager.get_all_running_ids(local=True)
+            if len(active) == 0:
+                return True
+            time.sleep(0.01)
+        return False
+
+    def wait_for_all_tasks_to_complete(self, timeout=5):
+        """
+        Wait for all tasks submitted to the executor to complete with graceful degradation.
+        """
+        if not self.executor:
+            return
+
+        self.logger.info(f"Initiating graceful shutdown with {timeout}s timeout")
+        self.executor.shutdown(wait=False)
+        self._clear_queued_tasks()
+        result = self._wait_for_active_tasks(timeout)
+        if not result:
+            self.logger.warning("Timeout reached while waiting for active tasks to complete")
+        return result

     def start_listening(self):
         """
         Listen for messages on the subscribed channels and handle them as they arrive.
         """
         self.logger.info("starting with channels...", self.channels)
+        self.start_time = time.time()
+        self.register_runner()
+        self.manager.take_out_the_dead(local=True)
         self.reset_running_tasks()
         self.queue_pending_tasks()
+        self.start_ping_thread()
+
         pubsub = self.manager.redis.pubsub()
         channel_keys = {self.manager.get_channel_key(channel): self.handle_message for channel in self.channels}
         pubsub.subscribe(**channel_keys)
+
         for message in pubsub.listen():
             if not self.running:
                 self.logger.info("shutting down, waiting for tasks to complete")
                 self.wait_for_all_tasks_to_complete()
+                self.unregister_runner()
                 self.logger.info("shutdown complete")
                 return
             if message['type'] != 'message':
@@ -154,6 +388,7 @@ def get_args():
     parser.add_argument("--start", action="store_true", help="Start the daemon")
     parser.add_argument("--stop", action="store_true", help="Stop the daemon")
     parser.add_argument("--foreground", "-f", action="store_true", help="Run in foreground mode")
+    parser.add_argument("--status", action="store_true", help="Show status of all runners")
     parser.add_argument("-v", "--verbose", action="store_true",
                         help="Enable verbose logging")
     return parser, parser.parse_args()
@@ -162,8 +397,18 @@ def get_args():
 def main():
     from mojo.helpers.settings import settings
     parser, args = get_args()
-    daemon = TaskEngine(settings.TASKIT_CHANNELS)
-    if args.start:
+    daemon = TaskEngine(settings.TASK_CHANNELS)
+
+    if args.status:
+        runners = daemon.manager.get_active_runners()
+        if runners:
+            print("Active TaskEngine Runners:")
+            for hostname, data in runners.items():
+                print(f" {hostname}: {data.get('status', 'unknown')} "
+                      f"(last ping: {time.time() - data.get('last_ping', 0):.1f}s ago)")
+        else:
+            print("No active runners found")
+    elif args.start:
         daemon.start()
     elif args.stop:
         daemon.stop()
@@ -172,3 +417,23 @@ def main():
         daemon.run()
     else:
         parser.print_help()
+
+
+
+def kill_thread(thread):
+    import ctypes
+    if not thread.is_alive():
+        return False
+
+    tid = thread.ident
+    if tid is None:
+        return False
+
+    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
+        ctypes.c_long(tid), ctypes.py_object(SystemExit)
+    )
+    if res > 1:
+        # Undo if multiple threads were affected
+        ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), 0)
+        return False
+    return True
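The dispatch change in on_run_task above establishes a payload convention: when a task's data dict carries both "args" and "kwargs", the handler is called with them unpacked; otherwise the whole task object is passed as before. A minimal sketch of that convention (only the "function"/"data" shape is taken from the diff; how such a payload is queued and published is not shown here and is an assumption):

    # Hypothetical task payload using the new args/kwargs convention
    task_payload = {
        "function": "mojo.apps.tasks.tq_handlers.run_args_kwargs_task",
        "channel": "default",
        "data": {"args": [1, 2], "kwargs": {"mode": "test"}},
    }

    # The runner's dispatch logic reduces to roughly this
    def dispatch(func, task_data):
        tdata = task_data.get("data", {})
        if tdata and "args" in tdata and "kwargs" in tdata:
            func(*tdata["args"], **tdata["kwargs"])  # new style: unpack directly
        else:
            func(task_data)  # legacy style: pass the task object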
mojo/apps/tasks/task.py ADDED
@@ -0,0 +1,99 @@
+from objict import objict
+import time
+
+
+class Task(objict):
+    """
+    Task model for the Django Mojo task system.
+
+    This class represents a task that can be queued, executed, and tracked
+    through various states (pending, running, completed, error, cancelled).
+    """
+
+    def __init__(self, id=None, function=None, data=None, channel="default",
+                 expires=None, created=None, status="pending", error=None,
+                 completed_at=None, **kwargs):
+        """
+        Initialize a new Task instance.
+
+        Args:
+            id (str): Unique identifier for the task
+            function (str): Function name to be executed
+            data (dict): Data to be passed to the function
+            channel (str): Channel name for task routing
+            expires (float): Expiration timestamp
+            created (float): Creation timestamp
+            status (str): Current task status
+            error (str): Error message if task failed
+            completed_at (float): Completion timestamp
+            **kwargs: Additional attributes
+        """
+        super().__init__(**kwargs)
+
+        self.id = id
+        self.function = function
+        self.data = data or {}
+        self.channel = channel
+        self.expires = expires
+        self.created = created or time.time()
+        self.status = status
+        self.error = error
+        self.completed_at = completed_at
+
+    def is_expired(self):
+        """
+        Check if the task has expired.
+
+        Returns:
+            bool: True if task has expired, False otherwise
+        """
+        if self.expires is None:
+            return False
+        return time.time() > self.expires
+
+    def is_pending(self):
+        """Check if task is in pending state."""
+        return self.status == "pending"
+
+    def is_running(self):
+        """Check if task is in running state."""
+        return self.status == "running"
+
+    def is_completed(self):
+        """Check if task is in completed state."""
+        return self.status == "completed"
+
+    def is_error(self):
+        """Check if task is in error state."""
+        return self.status == "error"
+
+    def is_cancelled(self):
+        """Check if task is in cancelled state."""
+        return self.status == "cancelled"
+
+    def mark_as_running(self):
+        """Mark task as running."""
+        self.status = "running"
+
+    def mark_as_completed(self):
+        """Mark task as completed."""
+        self.status = "completed"
+        self.completed_at = time.time()
+
+    def mark_as_error(self, error_message):
+        """Mark task as error with error message."""
+        self.status = "error"
+        self.error = error_message
+
+    def mark_as_cancelled(self):
+        """Mark task as cancelled."""
+        self.status = "cancelled"
+
+    def __str__(self):
+        """String representation of the task."""
+        return f"Task({self.id}, {self.function}, {self.status})"
+
+    def __repr__(self):
+        """Detailed string representation of the task."""
+        return (f"Task(id='{self.id}', function='{self.function}', "
+                f"status='{self.status}', channel='{self.channel}')")
mojo/apps/tasks/tq_handlers.py CHANGED
@@ -12,3 +12,121 @@ def run_error_task(task):
     logger.info("Running error task with data", task)
     time.sleep(2)
     raise Exception("Example error")
+
+
+def run_quick_task(task):
+    """Quick task for testing - completes immediately"""
+    logger.info("Running quick task with data", task)
+    return {"status": "completed", "data": task.data}
+
+
+def run_slow_task(task):
+    """Slow task for testing - takes 10 seconds"""
+    logger.info("Running slow task with data", task)
+    time.sleep(10)
+    return {"status": "completed", "duration": 10}
+
+
+def run_args_kwargs_task(*args, **kwargs):
+    """Task that receives args and kwargs directly"""
+    logger.info(f"Running args/kwargs task with args: {args}, kwargs: {kwargs}")
+    return {"args": args, "kwargs": kwargs}
+
+
+def run_data_processing_task(task):
+    """Task that processes data and returns results"""
+    logger.info("Running data processing task")
+    data = task.data
+    if not isinstance(data, dict):
+        raise ValueError("Data must be a dictionary")
+
+    result = {
+        "processed": True,
+        "input_keys": list(data.keys()),
+        "total_items": len(data)
+    }
+    return result
+
+
+def run_counter_task(task):
+    """Task that increments a counter - for testing state changes"""
+    logger.info("Running counter task")
+    count = task.data.get("count", 0)
+    new_count = count + 1
+    logger.info(f"Counter incremented from {count} to {new_count}")
+    return {"count": new_count}
+
+
+def run_timeout_task(task):
+    """Task that times out - for testing timeout scenarios"""
+    duration = task.data.get("duration", 60)
+    logger.info(f"Running timeout task for {duration} seconds")
+    time.sleep(duration)
+    return {"completed": True}
+
+
+def run_memory_task(task):
+    """Task that uses memory - for testing resource usage"""
+    logger.info("Running memory task")
+    size = task.data.get("size", 1000000)  # 1MB default
+    data = bytearray(size)
+    logger.info(f"Allocated {size} bytes")
+    return {"allocated_bytes": size}
+
+
+def run_conditional_error_task(task):
+    """Task that conditionally raises an error based on input"""
+    logger.info("Running conditional error task")
+    should_error = task.data.get("should_error", False)
+    error_message = task.data.get("error_message", "Conditional error occurred")
+
+    if should_error:
+        raise Exception(error_message)
+
+    return {"status": "success", "should_error": should_error}
+
+
+def run_nested_data_task(task):
+    """Task that works with nested data structures"""
+    logger.info("Running nested data task")
+    data = task.data
+
+    if "nested" not in data:
+        raise ValueError("Missing 'nested' key in data")
+
+    nested = data["nested"]
+    result = {
+        "original": nested,
+        "keys": list(nested.keys()) if isinstance(nested, dict) else None,
+        "length": len(nested) if hasattr(nested, '__len__') else None
+    }
+
+    return result
+
+
+# Test async task handlers
+def async_quick_task(message="Hello"):
+    """Async task handler for testing decorator"""
+    logger.info(f"Async quick task: {message}")
+    return f"Processed: {message}"
+
+
+def async_slow_task(duration=5):
+    """Async slow task handler for testing decorator"""
+    logger.info(f"Async slow task sleeping for {duration} seconds")
+    time.sleep(duration)
+    return f"Completed after {duration} seconds"
+
+
+def async_error_task(should_error=True, message="Async error"):
+    """Async error task handler for testing decorator"""
+    logger.info(f"Async error task - should_error: {should_error}")
+    if should_error:
+        raise Exception(message)
+    return "No error raised"
+
+
+def async_args_task(*args, **kwargs):
+    """Async task that tests args and kwargs handling"""
+    logger.info(f"Async args task - args: {args}, kwargs: {kwargs}")
+    return {"received_args": args, "received_kwargs": kwargs}
mojo/decorators/auth.py CHANGED
@@ -1,5 +1,8 @@
 from functools import wraps
 import mojo.errors
+from mojo.helpers import logit
+
+logger = logit.get_logger("error", "error.log")

 def requires_perms(*required_perms):
     def decorator(func):
@@ -7,7 +10,9 @@ def requires_perms(*required_perms):
         def wrapper(request, *args, **kwargs):
             if not request.user.is_authenticated:
                 raise mojo.errors.PermissionDeniedException()
-            if not request.user.has_permission(required_perms):
+            perms = set(required_perms)
+            if not request.user.has_permission(perms):
+                logger.error(f"{request.user.username} is missing {perms}")
                 raise mojo.errors.PermissionDeniedException()
             return func(request, *args, **kwargs)
         return wrapper
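The permission check itself is unchanged; the decorator now also logs which permissions the user was missing before raising. Usage stays the same, as in this hedged sketch (the permission names and view are illustrative, not taken from the package):

    from mojo.decorators.auth import requires_perms

    @requires_perms("manage_users", "view_users")  # illustrative permission names
    def on_list_users(request):
        # reached only if request.user.has_permission({"manage_users", "view_users"})
        # is truthy; otherwise PermissionDeniedException is raised and the missing
        # permission set is written to error.log
        ...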
mojo/decorators/http.py CHANGED
@@ -32,7 +32,12 @@ def dispatcher(request, *args, **kwargs):
     rest.ACTIVE_REQUEST = request
     key = kwargs.pop('__mojo_rest_root_key__', None)
     if "group" in request.DATA:
-        request.group = modules.get_model_instance("account", "Group", int(request.DATA.group))
+        try:
+            request.group = modules.get_model_instance("account", "Group", int(request.DATA.group))
+            if request.group is not None:
+                request.group.touch()
+        except ValueError:
+            return JsonResponse({"error": "Invalid group ID", "code": 400}, status=400)
     method_key = f"{key}__{request.method}"
     if method_key not in URLPATTERN_METHODS:
         method_key = f"{key}__ALL"
@@ -66,7 +71,7 @@ def dispatch_error_handler(func):
             metrics.record("api_errors", category="mojo_api", min_granularity=API_METRICS_GRANULARITY)
             # logger.exception(f"Unhandled REST Exception: {request.path}")
             logger.exception(f"Error: {str(err)}, Path: {request.path}, IP: {request.META.get('REMOTE_ADDR')}")
-            return JsonResponse({"error": str(err) }, status=500)
+            return JsonResponse({"error": str(err), "code": 500 }, status=500)

         return wrapper
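Both dispatcher changes standardize error responses to carry a numeric "code" field alongside "error", and a non-numeric group id is now rejected with a 400 instead of bubbling up as an unhandled exception. A hedged client-side sketch of what that looks like (the endpoint path is an assumption; the payload shape comes from the diff):

    import requests

    # passing a non-numeric group id now yields a 400 with a structured payload
    resp = requests.get("https://api.example.com/api/account/group",
                        params={"group": "not-a-number"})
    print(resp.status_code)  # 400
    print(resp.json())       # {"error": "Invalid group ID", "code": 400}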