jararaca 0.2.37a11__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of jararaca might be problematic.

jararaca/__init__.py CHANGED
@@ -2,6 +2,7 @@ from importlib import import_module
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
+    from jararaca.broker_backend.redis_broker_backend import RedisMessageBrokerBackend
     from jararaca.messagebus.bus_message_controller import (
         ack,
         nack,
@@ -60,10 +61,12 @@ if TYPE_CHECKING:
     from .messagebus.interceptors.aiopika_publisher_interceptor import (
         AIOPikaConnectionFactory,
         GenericPoolConfig,
+    )
+    from .messagebus.interceptors.publisher_interceptor import (
         MessageBusPublisherInterceptor,
     )
+    from .messagebus.message import Message, MessageOf
     from .messagebus.publisher import use_publisher
-    from .messagebus.types import Message, MessageOf
     from .messagebus.worker import MessageBusWorker
     from .microservice import Microservice, use_app_context, use_current_container
     from .persistence.base import T_BASEMODEL, BaseEntity
@@ -115,6 +118,7 @@ if TYPE_CHECKING:
     from .tools.app_config.interceptor import AppConfigurationInterceptor
 
 __all__ = [
+    "RedisMessageBrokerBackend",
     "FilterRuleApplier",
     "SortRuleApplier",
     "use_bus_message_controller",
@@ -216,6 +220,11 @@ if TYPE_CHECKING:
 __SPEC_PARENT__: str = __spec__.parent  # type: ignore
 # A mapping of {<member name>: (package, <module name>)} defining dynamic imports
 _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
+    "RedisMessageBrokerBackend": (
+        __SPEC_PARENT__,
+        "broker_backend.redis_broker_backend",
+        None,
+    ),
     "FilterRuleApplier": (__SPEC_PARENT__, "persistence.sort_filter", None),
     "SortRuleApplier": (__SPEC_PARENT__, "persistence.sort_filter", None),
     "use_bus_message_controller": (
@@ -286,8 +295,8 @@ _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
     ),
     "Identifiable": (__SPEC_PARENT__, "persistence.utilities", None),
     "IdentifiableEntity": (__SPEC_PARENT__, "persistence.utilities", None),
-    "MessageOf": (__SPEC_PARENT__, "messagebus.types", None),
-    "Message": (__SPEC_PARENT__, "messagebus.types", None),
+    "MessageOf": (__SPEC_PARENT__, "messagebus.message", None),
+    "Message": (__SPEC_PARENT__, "messagebus.message", None),
     "StringCriteria": (__SPEC_PARENT__, "persistence.utilities", None),
     "DateCriteria": (__SPEC_PARENT__, "persistence.utilities", None),
     "DateOrderedFilter": (__SPEC_PARENT__, "persistence.utilities", None),
@@ -339,7 +348,7 @@ _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
     ),
     "MessageBusPublisherInterceptor": (
         __SPEC_PARENT__,
-        "messagebus.interceptors.aiopika_publisher_interceptor",
+        "messagebus.interceptors.publisher_interceptor",
         None,
     ),
     "RedisWebSocketConnectionBackend": (
@@ -0,0 +1,102 @@
+from abc import ABC
+from contextlib import asynccontextmanager
+from typing import AsyncContextManager, AsyncGenerator, Iterable
+
+from jararaca.scheduler.types import DelayedMessageData
+
+
+class MessageBrokerBackend(ABC):
+
+    def lock(self) -> AsyncContextManager[None]:
+        """
+        Acquire a lock for the message broker backend.
+        This is used to ensure that only one instance of the scheduler is running at a time.
+        """
+        raise NotImplementedError(f"lock() is not implemented by {self.__class__}.")
+
+    async def get_last_dispatch_time(self, action_name: str) -> int | None:
+        """
+        Get the last dispatch time of the scheduled action.
+        This is used to determine if the scheduled action should be executed again
+        or if it should be skipped.
+        """
+        raise NotImplementedError(
+            f"get_last_dispatch_time() is not implemented by {self.__class__}."
+        )
+
+    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
+        """
+        Set the last dispatch time of the scheduled action.
+        This is used to determine if the scheduled action should be executed again
+        or if it should be skipped.
+        """
+        raise NotImplementedError(
+            f"set_last_dispatch_time() is not implemented by {self.__class__}."
+        )
+
+    async def get_in_execution_count(self, action_name: str) -> int:
+        """
+        Get the number of scheduled actions in execution.
+        This is used to determine if the scheduled action should be executed again
+        or if it should be skipped.
+        """
+        raise NotImplementedError(
+            f"get_in_execution_count() is not implemented by {self.__class__}."
+        )
+
+    def in_execution(self, action_name: str) -> AsyncContextManager[None]:
+        """
+        Acquire a lock for the scheduled action.
+        This is used to ensure that only one instance of the scheduled action is running at a time.
+        """
+        raise NotImplementedError(
+            f"in_execution() is not implemented by {self.__class__}."
+        )
+
+    async def dequeue_next_delayed_messages(
+        self, start_timestamp: int
+    ) -> Iterable[DelayedMessageData]:
+        """
+        Dequeue the next delayed messages from the message broker.
+        This is used to trigger the scheduled action.
+        """
+        raise NotImplementedError(
+            f"dequeue_next_delayed_messages() is not implemented by {self.__class__}."
+        )
+
+    async def enqueue_delayed_message(
+        self, delayed_message: DelayedMessageData
+    ) -> None:
+        """
+        Enqueue a delayed message to the message broker.
+        This is used to trigger the scheduled action.
+        """
+        raise NotImplementedError(
+            f"enqueue_delayed_message() is not implemented by {self.__class__}."
+        )
+
+    async def dispose(self) -> None:
+        """
+        Dispose of the message broker backend.
+        This is used to clean up resources used by the message broker backend.
+        """
+
+
+class NullBackend(MessageBrokerBackend):
+    """
+    A null backend that does nothing.
+    This is used for testing purposes.
+    """
+
+    @asynccontextmanager
+    async def lock(self) -> AsyncGenerator[None, None]:
+        yield
+
+    async def get_last_dispatch_time(self, action_name: str) -> int:
+        return 0
+
+    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
+        pass
+
+    async def dispose(self) -> None:
+        pass
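The abstract class above defines the contract the v2 scheduler and worker rely on. As a rough illustration of what an alternative implementation has to provide, here is a minimal, hypothetical in-memory backend; it assumes jararaca 0.3.0 is installed (the import paths match the modules introduced in this diff) and is only a single-process sketch, not something for real deployments.

from contextlib import asynccontextmanager
from typing import AsyncGenerator, Iterable

from jararaca.broker_backend import MessageBrokerBackend
from jararaca.scheduler.types import DelayedMessageData


class InMemoryBrokerBackend(MessageBrokerBackend):
    """Hypothetical single-process backend; methods not overridden keep the base behavior."""

    def __init__(self) -> None:
        self._last_dispatch: dict[str, int] = {}
        self._delayed: list[DelayedMessageData] = []

    @asynccontextmanager
    async def lock(self) -> AsyncGenerator[None, None]:
        # A single process needs no cross-instance lock.
        yield

    async def get_last_dispatch_time(self, action_name: str) -> int | None:
        return self._last_dispatch.get(action_name)

    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
        self._last_dispatch[action_name] = timestamp

    async def enqueue_delayed_message(self, delayed_message: DelayedMessageData) -> None:
        self._delayed.append(delayed_message)

    async def dequeue_next_delayed_messages(
        self, start_timestamp: int
    ) -> Iterable[DelayedMessageData]:
        # Return (and drop) every message whose dispatch_time is due.
        due = [m for m in self._delayed if m.dispatch_time <= start_timestamp]
        self._delayed = [m for m in self._delayed if m.dispatch_time > start_timestamp]
        return due

    async def dispose(self) -> None:
        self._delayed.clear()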
@@ -0,0 +1,21 @@
+from jararaca.broker_backend import MessageBrokerBackend
+
+
+def get_message_broker_backend_from_url(url: str) -> MessageBrokerBackend:
+    """
+    Factory function to create a message broker backend instance from a URL.
+    Currently, only Redis is supported.
+    """
+    if (
+        url.startswith("redis://")
+        or url.startswith("rediss://")
+        or url.startswith("redis-socket://")
+        or url.startswith("rediss+socket://")
+    ):
+        from jararaca.broker_backend.redis_broker_backend import (
+            RedisMessageBrokerBackend,
+        )
+
+        return RedisMessageBrokerBackend(url)
+    else:
+        raise ValueError(f"Unsupported message broker backend URL: {url}")
@@ -0,0 +1,162 @@
+import logging
+import time
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator, Iterable
+from uuid import uuid4
+
+import redis.asyncio
+
+from jararaca.broker_backend import MessageBrokerBackend
+from jararaca.scheduler.types import DelayedMessageData
+
+logger = logging.getLogger(__name__)
+
+
+class RedisMessageBrokerBackend(MessageBrokerBackend):
+    def __init__(self, url: str) -> None:
+        self.redis = redis.asyncio.Redis.from_url(url)
+        self.last_dispatch_time_key = "last_dispatch_time:{action_name}"
+        self.last_execution_time_key = "last_execution_time:{action_name}"
+        self.execution_indicator_key = "in_execution:{action_name}:{timestamp}"
+        self.execution_indicator_expiration = 60 * 5
+        self.delayed_messages_key = "delayed_messages"
+        self.delayed_messages_metadata_key = "delayed_messages_metadata:{task_id}"
+
+    @asynccontextmanager
+    async def lock(self) -> AsyncGenerator[None, None]:
+        yield
+
+    async def get_last_dispatch_time(self, action_name: str) -> int | None:
+
+        key = self.last_dispatch_time_key.format(action_name=action_name)
+        last_execution_time = await self.redis.get(key)
+        if last_execution_time is None:
+            return None
+        return int(last_execution_time)
+
+    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
+        key = self.last_dispatch_time_key.format(action_name=action_name)
+        await self.redis.set(key, timestamp)
+
+    async def get_last_execution_time(self, action_name: str) -> int | None:
+        key = self.last_execution_time_key.format(action_name=action_name)
+        last_execution_time = await self.redis.get(key)
+        if last_execution_time is None:
+            return None
+        return int(last_execution_time)
+
+    async def set_last_execution_time(self, action_name: str, timestamp: int) -> None:
+        key = self.last_execution_time_key.format(action_name=action_name)
+        await self.redis.set(key, timestamp)
+
+    async def get_in_execution_count(self, action_name: str) -> int:
+        key = self.execution_indicator_key.format(
+            action_name=action_name, timestamp="*"
+        )
+        in_execution_count = await self.redis.keys(key)
+        if in_execution_count is None:
+            return 0
+
+        return len(in_execution_count)
+
+    @asynccontextmanager
+    async def in_execution(self, action_name: str) -> AsyncGenerator[None, None]:
+        """
+        Acquire a lock for the scheduled action.
+        This is used to ensure that only one instance of the scheduled action is running at a time.
+        """
+        key = self.execution_indicator_key.format(
+            action_name=action_name, timestamp=int(time.time())
+        )
+        await self.redis.set(key, 1, ex=self.execution_indicator_expiration)
+        try:
+            yield
+        finally:
+            await self.redis.delete(key)
+
+    async def enqueue_delayed_message(
+        self, delayed_message: DelayedMessageData
+    ) -> None:
+        """
+        Enqueue a delayed message to the message broker.
+        This is used to trigger the scheduled action.
+        """
+        task_id = str(uuid4())
+        async with self.redis.pipeline() as pipe:
+            pipe.set(
+                self.delayed_messages_metadata_key.format(task_id=task_id),
+                delayed_message.model_dump_json().encode(),
+            )
+            pipe.zadd(
+                self.delayed_messages_key,
+                {task_id: delayed_message.dispatch_time},
+                nx=True,
+            )
+            await pipe.execute()
+
+    async def dequeue_next_delayed_messages(
+        self, start_timestamp: int
+    ) -> Iterable[DelayedMessageData]:
+        """
+        Dequeue the next delayed messages from the message broker.
+        This is used to trigger the scheduled action.
+        """
+        tasks_ids = await self.redis.zrangebyscore(
+            name=self.delayed_messages_key,
+            max=start_timestamp,
+            min="-inf",
+            withscores=False,
+        )
+
+        if not tasks_ids:
+            return []
+
+        tasks_bytes_data: list[bytes] = []
+
+        for task_id_bytes in tasks_ids:
+            metadata = await self.redis.get(
+                self.delayed_messages_metadata_key.format(
+                    task_id=task_id_bytes.decode()
+                )
+            )
+            if metadata is None:
+                logger.warning(
+                    f"Delayed message metadata not found for task_id: {task_id_bytes.decode()}"
+                )

+                continue
+
+            tasks_bytes_data.append(metadata)
+
+        async with self.redis.pipeline() as pipe:
+            for task_id_bytes in tasks_ids:
+                pipe.zrem(self.delayed_messages_key, task_id_bytes.decode())
+                pipe.delete(
+                    self.delayed_messages_metadata_key.format(
+                        task_id=task_id_bytes.decode()
+                    )
+                )
+            await pipe.execute()
+
+        delayed_messages: list[DelayedMessageData] = []
+
+        for task_bytes_data in tasks_bytes_data:
+            try:
+                delayed_message = DelayedMessageData.model_validate_json(
+                    task_bytes_data.decode()
+                )
+                delayed_messages.append(delayed_message)
+            except Exception:
+                logger.error(
+                    f"Error parsing delayed message: {task_bytes_data.decode()}"
+                )
+                continue
+
+        return delayed_messages
+
+    async def dispose(self) -> None:
+        """
+        Dispose of the message broker backend.
+        This is used to close the connection to the message broker.
+        """
+        await self.redis.close()
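Taken together, enqueue_delayed_message stores the serialized message under a per-task metadata key and adds the task id to a sorted set scored by dispatch_time, while dequeue_next_delayed_messages pulls every entry whose score is due and then removes it. A minimal round-trip sketch, assuming a Redis server at a placeholder URL and an already-constructed DelayedMessageData instance (its fields are not shown in this diff, so none are invented here):

import time

from jararaca.broker_backend.redis_broker_backend import RedisMessageBrokerBackend
from jararaca.scheduler.types import DelayedMessageData


async def roundtrip(message: DelayedMessageData) -> None:
    # Placeholder URL; any redis:// or rediss:// URL accepted by redis-py works here.
    backend = RedisMessageBrokerBackend("redis://localhost:6379/0")
    try:
        # Stores the JSON body and ZADDs the task id with dispatch_time as its score.
        await backend.enqueue_delayed_message(message)
        # Fetches and removes everything whose dispatch_time is at or before "now".
        due = await backend.dequeue_next_delayed_messages(int(time.time()))
        for delayed in due:
            print("due at:", delayed.dispatch_time)
    finally:
        await backend.dispose()

# Run with e.g. asyncio.run(roundtrip(some_delayed_message)), where the caller
# supplies the DelayedMessageData instance.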
jararaca/cli.py CHANGED
@@ -1,5 +1,8 @@
 import importlib
 import importlib.resources
+import os
+import sys
+import time
 from codecs import StreamWriter
 from typing import Any
 from urllib.parse import urlparse, urlunsplit
@@ -8,11 +11,13 @@ import click
 import uvicorn
 from mako.template import Template  # type: ignore
 
-from jararaca.messagebus.worker import AioPikaWorkerConfig, MessageBusWorker
+from jararaca.messagebus import worker as worker_v1
+from jararaca.messagebus import worker_v2 as worker_v2_mod
 from jararaca.microservice import Microservice
 from jararaca.presentation.http_microservice import HttpMicroservice
 from jararaca.presentation.server import create_http_server
-from jararaca.scheduler.scheduler import Scheduler, SchedulerBackend, SchedulerConfig
+from jararaca.scheduler.scheduler import Scheduler
+from jararaca.scheduler.scheduler_v2 import SchedulerV2
 from jararaca.tools.typescript.interface_parser import (
     write_microservice_to_typescript_interface,
 )
@@ -32,7 +37,10 @@ def find_item_by_module_path(
     try:
         module = importlib.import_module(module_name)
     except ImportError as e:
-        raise ImportError("App module not found") from e
+        if e.name == module_name:
+            raise ImportError("Module not found") from e
+        else:
+            raise
 
     if not hasattr(module, app):
         raise ValueError("module %s has no attribute %s" % (module, app))
@@ -70,7 +78,7 @@ def cli() -> None:
 @click.option(
     "--url",
     type=str,
-    default="amqp://guest:guest@localhost/",
+    envvar="BROKER_URL",
 )
 @click.option(
     "--username",
@@ -87,11 +95,6 @@ def cli() -> None:
     type=str,
     default="jararaca_ex",
 )
-@click.option(
-    "--queue",
-    type=str,
-    default="jararaca_q",
-)
 @click.option(
     "--prefetch-count",
     type=int,
@@ -103,7 +106,6 @@ def worker(
     username: str | None,
     password: str | None,
     exchange: str,
-    queue: str,
     prefetch_count: int,
 ) -> None:
 
@@ -131,30 +133,59 @@
 
     url = parsed_url.geturl()
 
-    config = AioPikaWorkerConfig(
+    config = worker_v1.AioPikaWorkerConfig(
         url=url,
         exchange=exchange,
-        queue=queue,
         prefetch_count=prefetch_count,
     )
 
-    MessageBusWorker(app, config=config).start_sync()
+    worker_v1.MessageBusWorker(app, config=config).start_sync()
 
 
 @cli.command()
 @click.argument(
     "app_path",
     type=str,
+    envvar="APP_PATH",
+)
+@click.option(
+    "--broker-url",
+    type=str,
+    envvar="BROKER_URL",
+)
+@click.option(
+    "--backend-url",
+    type=str,
+    envvar="BACKEND_URL",
+)
+def worker_v2(app_path: str, broker_url: str, backend_url: str) -> None:
+
+    app = find_microservice_by_module_path(app_path)
+
+    worker_v2_mod.MessageBusWorker(
+        app=app,
+        broker_url=broker_url,
+        backend_url=backend_url,
+    ).start_sync()
+
+
+@cli.command()
+@click.argument(
+    "app_path",
+    type=str,
+    envvar="APP_PATH",
 )
 @click.option(
     "--host",
     type=str,
     default="0.0.0.0",
+    envvar="HOST",
 )
 @click.option(
     "--port",
     type=int,
     default=8000,
+    envvar="PORT",
 )
 def server(app_path: str, host: str, port: int) -> None:
 
@@ -177,9 +208,6 @@ def server(app_path: str, host: str, port: int) -> None:
     uvicorn.run(asgi_app, host=host, port=port)
 
 
-class NullBackend(SchedulerBackend): ...
-
-
 @cli.command()
 @click.argument(
     "app_path",
@@ -196,7 +224,45 @@ def scheduler(
 ) -> None:
     app = find_microservice_by_module_path(app_path)
 
-    Scheduler(app, NullBackend(), SchedulerConfig(interval=interval)).run()
+    Scheduler(app, interval=interval).run()
+
+
+@cli.command()
+@click.argument(
+    "app_path",
+    type=str,
+)
+@click.option(
+    "--interval",
+    type=int,
+    default=1,
+    required=True,
+)
+@click.option(
+    "--broker-url",
+    type=str,
+    required=True,
+)
+@click.option(
+    "--backend-url",
+    type=str,
+    required=True,
+)
+def scheduler_v2(
+    interval: int,
+    broker_url: str,
+    backend_url: str,
+    app_path: str,
+) -> None:
+
+    app = find_microservice_by_module_path(app_path)
+    scheduler = SchedulerV2(
+        app=app,
+        interval=interval,
+        backend_url=backend_url,
+        broker_url=broker_url,
+    )
+    scheduler.run()
 
 
 @cli.command()
@@ -208,12 +274,81 @@ def scheduler(
     "file_path",
     type=click.File("w"),
 )
-def gen_tsi(app_path: str, file_path: StreamWriter) -> None:
-    app = find_microservice_by_module_path(app_path)
+@click.option(
+    "--watch",
+    is_flag=True,
+)
+@click.option(
+    "--src-dir",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True),
+    default="src",
+)
+def gen_tsi(app_path: str, file_path: StreamWriter, watch: bool, src_dir: str) -> None:
+    """Generate TypeScript interfaces from a Python microservice."""
+
+    # Generate typescript interfaces
+    def generate_interfaces() -> None:
+        try:
+            app = find_microservice_by_module_path(app_path)
+            content = write_microservice_to_typescript_interface(app)
+
+            # Save current position
+            file_path.tell()
+
+            # Reset file to beginning
+            file_path.seek(0)
+            file_path.truncate()
+
+            # Write new content
+            file_path.write(content)
+            file_path.flush()
+
+            print(f"Generated TypeScript interfaces at {time.strftime('%H:%M:%S')}")
+        except Exception as e:
+            print(f"Error generating TypeScript interfaces: {e}", file=sys.stderr)
 
-    content = write_microservice_to_typescript_interface(app)
+    # Initial generation
+    generate_interfaces()
 
-    file_path.write(content)
+    # If watch mode is not enabled, exit
+    if not watch:
+        return
+
+    try:
+        from watchdog.events import FileSystemEvent, FileSystemEventHandler
+        from watchdog.observers import Observer
+    except ImportError:
+        print(
+            "Watchdog is required for watch mode. Install it with: pip install watchdog",
+            file=sys.stderr,
+        )
+        return
+
+    # Set up file system event handler
+    class PyFileChangeHandler(FileSystemEventHandler):
+        def on_modified(self, event: FileSystemEvent) -> None:
+            src_path = (
+                event.src_path
+                if isinstance(event.src_path, str)
+                else str(event.src_path)
+            )
+            if not event.is_directory and src_path.endswith(".py"):
+                print(f"File changed: {src_path}")
+                generate_interfaces()
+
+    # Set up observer
+    observer = Observer()
+    observer.schedule(PyFileChangeHandler(), src_dir, recursive=True)
+    observer.start()
+
+    print(f"Watching for changes in {os.path.abspath(src_dir)}...")
+    try:
+        while True:
+            time.sleep(1)
+    except KeyboardInterrupt:
+        observer.stop()
+        print("Watch mode stopped")
+    observer.join()
 
 
 def camel_case_to_snake_case(name: str) -> str:
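For orientation, here is a programmatic sketch of what the new worker_v2 and scheduler_v2 commands wire up, based only on the calls visible in this diff; the Microservice instance is assumed to be constructed elsewhere in the application, and both entry points are the blocking start_sync()/run() calls used by the CLI commands above.

from jararaca.messagebus import worker_v2 as worker_v2_mod
from jararaca.microservice import Microservice
from jararaca.scheduler.scheduler_v2 import SchedulerV2


def run_worker_v2(app: Microservice, broker_url: str, backend_url: str) -> None:
    # Same call the worker_v2 CLI command makes after resolving the app path.
    worker_v2_mod.MessageBusWorker(
        app=app,
        broker_url=broker_url,
        backend_url=backend_url,
    ).start_sync()


def run_scheduler_v2(app: Microservice, broker_url: str, backend_url: str) -> None:
    # Same call the scheduler_v2 CLI command makes; interval=1 mirrors its default.
    SchedulerV2(
        app=app,
        interval=1,
        backend_url=backend_url,
        broker_url=broker_url,
    ).run()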
@@ -1,3 +1,3 @@
-from .types import MessageOf
+from .message import MessageOf
 
 __all__ = ["MessageOf"]