jararaca 0.2.37a12__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of jararaca might be problematic; consult the package registry's advisory page for more details.

jararaca/__init__.py CHANGED
@@ -2,6 +2,7 @@ from importlib import import_module
2
2
  from typing import TYPE_CHECKING
3
3
 
4
4
  if TYPE_CHECKING:
5
+ from jararaca.broker_backend.redis_broker_backend import RedisMessageBrokerBackend
5
6
  from jararaca.messagebus.bus_message_controller import (
6
7
  ack,
7
8
  nack,
@@ -60,10 +61,12 @@ if TYPE_CHECKING:
60
61
  from .messagebus.interceptors.aiopika_publisher_interceptor import (
61
62
  AIOPikaConnectionFactory,
62
63
  GenericPoolConfig,
64
+ )
65
+ from .messagebus.interceptors.publisher_interceptor import (
63
66
  MessageBusPublisherInterceptor,
64
67
  )
68
+ from .messagebus.message import Message, MessageOf
65
69
  from .messagebus.publisher import use_publisher
66
- from .messagebus.types import Message, MessageOf
67
70
  from .messagebus.worker import MessageBusWorker
68
71
  from .microservice import Microservice, use_app_context, use_current_container
69
72
  from .persistence.base import T_BASEMODEL, BaseEntity
@@ -115,6 +118,7 @@ if TYPE_CHECKING:
115
118
  from .tools.app_config.interceptor import AppConfigurationInterceptor
116
119
 
117
120
  __all__ = [
121
+ "RedisMessageBrokerBackend",
118
122
  "FilterRuleApplier",
119
123
  "SortRuleApplier",
120
124
  "use_bus_message_controller",
@@ -216,6 +220,11 @@ if TYPE_CHECKING:
216
220
  __SPEC_PARENT__: str = __spec__.parent # type: ignore
217
221
  # A mapping of {<member name>: (package, <module name>)} defining dynamic imports
218
222
  _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
223
+ "RedisMessageBrokerBackend": (
224
+ __SPEC_PARENT__,
225
+ "broker_backend.redis_broker_backend",
226
+ None,
227
+ ),
219
228
  "FilterRuleApplier": (__SPEC_PARENT__, "persistence.sort_filter", None),
220
229
  "SortRuleApplier": (__SPEC_PARENT__, "persistence.sort_filter", None),
221
230
  "use_bus_message_controller": (
@@ -286,8 +295,8 @@ _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
286
295
  ),
287
296
  "Identifiable": (__SPEC_PARENT__, "persistence.utilities", None),
288
297
  "IdentifiableEntity": (__SPEC_PARENT__, "persistence.utilities", None),
289
- "MessageOf": (__SPEC_PARENT__, "messagebus.types", None),
290
- "Message": (__SPEC_PARENT__, "messagebus.types", None),
298
+ "MessageOf": (__SPEC_PARENT__, "messagebus.message", None),
299
+ "Message": (__SPEC_PARENT__, "messagebus.message", None),
291
300
  "StringCriteria": (__SPEC_PARENT__, "persistence.utilities", None),
292
301
  "DateCriteria": (__SPEC_PARENT__, "persistence.utilities", None),
293
302
  "DateOrderedFilter": (__SPEC_PARENT__, "persistence.utilities", None),
@@ -339,7 +348,7 @@ _dynamic_imports: "dict[str, tuple[str, str, str | None]]" = {
339
348
  ),
340
349
  "MessageBusPublisherInterceptor": (
341
350
  __SPEC_PARENT__,
342
- "messagebus.interceptors.aiopika_publisher_interceptor",
351
+ "messagebus.interceptors.publisher_interceptor",
343
352
  None,
344
353
  ),
345
354
  "RedisWebSocketConnectionBackend": (
@@ -0,0 +1,102 @@
1
+ from abc import ABC
2
+ from contextlib import asynccontextmanager
3
+ from typing import AsyncContextManager, AsyncGenerator, Iterable
4
+
5
+ from jararaca.scheduler.types import DelayedMessageData
6
+
7
+
8
class MessageBrokerBackend(ABC):
    """Abstract coordination interface between the scheduler and a message broker.

    Concrete backends (e.g. Redis) persist dispatch bookkeeping and a
    delayed-message queue. Every method raises NotImplementedError rather
    than being declared @abstractmethod, so subclasses may override only
    the operations they actually need.
    """

    def lock(self) -> AsyncContextManager[None]:
        """
        Acquire a lock for the message broker backend.
        This is used to ensure that only one instance of the scheduler is running at a time.
        """
        raise NotImplementedError(f"lock() is not implemented by {self.__class__}.")

    async def get_last_dispatch_time(self, action_name: str) -> int | None:
        """
        Get the last dispatch time of the scheduled action.
        This is used to determine if the scheduled action should be executed again
        or if it should be skipped.
        """
        raise NotImplementedError(
            f"get_last_dispatch_time() is not implemented by {self.__class__}."
        )

    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
        """
        Set the last dispatch time of the scheduled action.
        This is used to determine if the scheduled action should be executed again
        or if it should be skipped.
        """
        raise NotImplementedError(
            f"set_last_dispatch_time() is not implemented by {self.__class__}."
        )

    async def get_in_execution_count(self, action_name: str) -> int:
        """
        Get the number of scheduled actions in execution.
        This is used to determine if the scheduled action should be executed again
        or if it should be skipped.
        """
        raise NotImplementedError(
            f"get_in_execution_count() is not implemented by {self.__class__}."
        )

    def in_execution(self, action_name: str) -> AsyncContextManager[None]:
        """
        Acquire a lock for the scheduled action.
        This is used to ensure that only one instance of the scheduled action is running at a time.
        """
        raise NotImplementedError(
            f"in_execution() is not implemented by {self.__class__}."
        )

    async def dequeue_next_delayed_messages(
        self, start_timestamp: int
    ) -> Iterable[DelayedMessageData]:
        """
        Dequeue the next delayed messages from the message broker.
        This is used to trigger the scheduled action.
        """
        raise NotImplementedError(
            f"dequeue_next_delayed_messages() is not implemented by {self.__class__}."
        )

    async def enqueue_delayed_message(
        self, delayed_message: DelayedMessageData
    ) -> None:
        """
        Enqueue a delayed message to the message broker.
        This is used to trigger the scheduled action.
        """
        raise NotImplementedError(
            f"enqueue_delayed_message() is not implemented by {self.__class__}."
        )

    async def dispose(self) -> None:
        """
        Dispose of the message broker backend.
        This is used to clean up resources used by the message broker backend.
        """
        # Intentionally a no-op by default: the base class holds no resources.
83
+
84
+
85
class NullBackend(MessageBrokerBackend):
    """
    A null backend that does nothing.
    This is used for testing purposes.
    """

    @asynccontextmanager
    async def lock(self) -> AsyncGenerator[None, None]:
        # No coordination: the "lock" is always immediately available.
        yield

    async def get_last_dispatch_time(self, action_name: str) -> int:
        return 0

    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
        pass

    async def get_in_execution_count(self, action_name: str) -> int:
        # Fix: previously fell through to the base class and raised
        # NotImplementedError; a do-nothing backend reports zero in-flight runs.
        return 0

    @asynccontextmanager
    async def in_execution(self, action_name: str) -> AsyncGenerator[None, None]:
        # Fix: no-op execution marker instead of the base NotImplementedError.
        yield

    async def enqueue_delayed_message(
        self, delayed_message: DelayedMessageData
    ) -> None:
        # Fix: silently drop delayed messages instead of raising.
        pass

    async def dequeue_next_delayed_messages(
        self, start_timestamp: int
    ) -> Iterable[DelayedMessageData]:
        # Fix: nothing is ever queued, so nothing is ever due.
        return []

    async def dispose(self) -> None:
        pass
@@ -0,0 +1,21 @@
1
+ from jararaca.broker_backend import MessageBrokerBackend
2
+
3
+
4
def get_message_broker_backend_from_url(url: str) -> MessageBrokerBackend:
    """Build a message broker backend for the given connection URL.

    Only Redis URL schemes are currently supported; any other scheme
    raises ValueError.
    """
    redis_schemes = (
        "redis://",
        "rediss://",
        "redis-socket://",
        "rediss+socket://",
    )
    # Guard clause: reject anything that is not a known Redis scheme.
    if not url.startswith(redis_schemes):
        raise ValueError(f"Unsupported message broker backend URL: {url}")

    # Imported lazily so the redis dependency is only needed when used.
    from jararaca.broker_backend.redis_broker_backend import (
        RedisMessageBrokerBackend,
    )

    return RedisMessageBrokerBackend(url)
@@ -0,0 +1,162 @@
1
+ import logging
2
+ import time
3
+ from contextlib import asynccontextmanager
4
+ from typing import AsyncGenerator, Iterable
5
+ from uuid import uuid4
6
+
7
+ import redis.asyncio
8
+
9
+ from jararaca.broker_backend import MessageBrokerBackend
10
+ from jararaca.scheduler.types import DelayedMessageData
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class RedisMessageBrokerBackend(MessageBrokerBackend):
    """Redis-backed MessageBrokerBackend.

    Scheduler bookkeeping lives in plain keys; delayed messages live in a
    sorted set scored by their dispatch timestamp, with the serialized
    payload stored under a per-task metadata key.
    """

    def __init__(self, url: str) -> None:
        self.redis = redis.asyncio.Redis.from_url(url)
        # Key templates, filled via str.format before each use.
        self.last_dispatch_time_key = "last_dispatch_time:{action_name}"
        self.last_execution_time_key = "last_execution_time:{action_name}"
        self.execution_indicator_key = "in_execution:{action_name}:{timestamp}"
        # TTL (seconds) so a crashed worker's in-execution marker expires.
        self.execution_indicator_expiration = 60 * 5
        self.delayed_messages_key = "delayed_messages"
        self.delayed_messages_metadata_key = "delayed_messages_metadata:{task_id}"

    @asynccontextmanager
    async def lock(self) -> AsyncGenerator[None, None]:
        # NOTE(review): currently a no-op — it does NOT provide cross-process
        # mutual exclusion. Confirm whether a real Redis lock is intended here.
        yield

    async def get_last_dispatch_time(self, action_name: str) -> int | None:
        """Return the last dispatch timestamp for the action, or None if unset."""
        key = self.last_dispatch_time_key.format(action_name=action_name)
        raw = await self.redis.get(key)
        if raw is None:
            return None
        return int(raw)

    async def set_last_dispatch_time(self, action_name: str, timestamp: int) -> None:
        """Record the last dispatch timestamp for the action."""
        key = self.last_dispatch_time_key.format(action_name=action_name)
        await self.redis.set(key, timestamp)

    async def get_last_execution_time(self, action_name: str) -> int | None:
        """Return the last execution timestamp for the action, or None if unset."""
        key = self.last_execution_time_key.format(action_name=action_name)
        raw = await self.redis.get(key)
        if raw is None:
            return None
        return int(raw)

    async def set_last_execution_time(self, action_name: str, timestamp: int) -> None:
        """Record the last execution timestamp for the action."""
        key = self.last_execution_time_key.format(action_name=action_name)
        await self.redis.set(key, timestamp)

    async def get_in_execution_count(self, action_name: str) -> int:
        """Count in-flight executions by matching in-execution marker keys.

        NOTE(review): KEYS is O(N) over the whole keyspace and blocks Redis;
        consider SCAN for large deployments.
        """
        pattern = self.execution_indicator_key.format(
            action_name=action_name, timestamp="*"
        )
        # redis-py's keys() always returns a list, so no None check is needed.
        matching = await self.redis.keys(pattern)
        return len(matching)

    @asynccontextmanager
    async def in_execution(self, action_name: str) -> AsyncGenerator[None, None]:
        """
        Acquire a lock for the scheduled action.
        This is used to ensure that only one instance of the scheduled action is running at a time.
        """
        # The marker key embeds the current epoch second; the TTL guarantees
        # cleanup even if this process dies before the finally block runs.
        # NOTE(review): two executions of the same action within the same
        # second would share a key — confirm that cannot happen in practice.
        key = self.execution_indicator_key.format(
            action_name=action_name, timestamp=int(time.time())
        )
        await self.redis.set(key, 1, ex=self.execution_indicator_expiration)
        try:
            yield
        finally:
            await self.redis.delete(key)

    async def enqueue_delayed_message(
        self, delayed_message: DelayedMessageData
    ) -> None:
        """
        Enqueue a delayed message to the message broker.
        This is used to trigger the scheduled action.
        """
        task_id = str(uuid4())
        # One round-trip: metadata SET + sorted-set ZADD in a pipeline.
        async with self.redis.pipeline() as pipe:
            pipe.set(
                self.delayed_messages_metadata_key.format(task_id=task_id),
                delayed_message.model_dump_json().encode(),
            )
            pipe.zadd(
                self.delayed_messages_key,
                {task_id: delayed_message.dispatch_time},
                nx=True,  # never overwrite an existing score for this id
            )
            await pipe.execute()

    async def dequeue_next_delayed_messages(
        self, start_timestamp: int
    ) -> Iterable[DelayedMessageData]:
        """
        Dequeue every delayed message due at or before start_timestamp.

        NOTE(review): the read and the delete below are not atomic, so two
        concurrent consumers could both observe the same messages — confirm
        a single scheduler instance is assumed.
        """
        tasks_ids = await self.redis.zrangebyscore(
            name=self.delayed_messages_key,
            max=start_timestamp,
            min="-inf",
            withscores=False,
        )

        if not tasks_ids:
            return []

        metadata_keys = [
            self.delayed_messages_metadata_key.format(task_id=task_id_bytes.decode())
            for task_id_bytes in tasks_ids
        ]
        # Single MGET instead of one GET per task id (was O(n) round-trips).
        metadata_values = await self.redis.mget(metadata_keys)

        tasks_bytes_data: list[bytes] = []
        for task_id_bytes, metadata in zip(tasks_ids, metadata_values):
            if metadata is None:
                # Lazy %-args: no string formatting when the level is disabled.
                logger.warning(
                    "Delayed message metadata not found for task_id: %s",
                    task_id_bytes.decode(),
                )
                continue
            tasks_bytes_data.append(metadata)

        # Remove the dequeued entries (ids and metadata) in one round-trip.
        async with self.redis.pipeline() as pipe:
            for task_id_bytes, metadata_key in zip(tasks_ids, metadata_keys):
                pipe.zrem(self.delayed_messages_key, task_id_bytes.decode())
                pipe.delete(metadata_key)
            await pipe.execute()

        delayed_messages: list[DelayedMessageData] = []
        for task_bytes_data in tasks_bytes_data:
            try:
                delayed_messages.append(
                    DelayedMessageData.model_validate_json(task_bytes_data.decode())
                )
            except Exception:
                # logger.exception preserves the traceback for diagnosis.
                logger.exception(
                    "Error parsing delayed message: %s", task_bytes_data.decode()
                )
                continue

        return delayed_messages

    async def dispose(self) -> None:
        """
        Dispose of the message broker backend.
        This is used to close the connection to the message broker.
        """
        # NOTE(review): redis-py >= 5 prefers aclose(); close() is kept for
        # compatibility with the currently pinned redis version.
        await self.redis.close()
jararaca/cli.py CHANGED
@@ -1,9 +1,11 @@
1
1
  import importlib
2
2
  import importlib.resources
3
+ import multiprocessing
3
4
  import os
4
5
  import sys
5
6
  import time
6
7
  from codecs import StreamWriter
8
+ from pathlib import Path
7
9
  from typing import Any
8
10
  from urllib.parse import urlparse, urlunsplit
9
11
 
@@ -11,11 +13,13 @@ import click
11
13
  import uvicorn
12
14
  from mako.template import Template # type: ignore
13
15
 
14
- from jararaca.messagebus.worker import AioPikaWorkerConfig, MessageBusWorker
16
+ from jararaca.messagebus import worker as worker_v1
17
+ from jararaca.messagebus import worker_v2 as worker_v2_mod
15
18
  from jararaca.microservice import Microservice
16
19
  from jararaca.presentation.http_microservice import HttpMicroservice
17
20
  from jararaca.presentation.server import create_http_server
18
- from jararaca.scheduler.scheduler import Scheduler, SchedulerBackend, SchedulerConfig
21
+ from jararaca.scheduler.scheduler import Scheduler
22
+ from jararaca.scheduler.scheduler_v2 import SchedulerV2
19
23
  from jararaca.tools.typescript.interface_parser import (
20
24
  write_microservice_to_typescript_interface,
21
25
  )
@@ -35,7 +39,10 @@ def find_item_by_module_path(
35
39
  try:
36
40
  module = importlib.import_module(module_name)
37
41
  except ImportError as e:
38
- raise ImportError("App module not found") from e
42
+ if e.name == module_name:
43
+ raise ImportError("Module not found") from e
44
+ else:
45
+ raise
39
46
 
40
47
  if not hasattr(module, app):
41
48
  raise ValueError("module %s has no attribute %s" % (module, app))
@@ -73,7 +80,7 @@ def cli() -> None:
73
80
  @click.option(
74
81
  "--url",
75
82
  type=str,
76
- default="amqp://guest:guest@localhost/",
83
+ envvar="BROKER_URL",
77
84
  )
78
85
  @click.option(
79
86
  "--username",
@@ -90,11 +97,6 @@ def cli() -> None:
90
97
  type=str,
91
98
  default="jararaca_ex",
92
99
  )
93
- @click.option(
94
- "--queue",
95
- type=str,
96
- default="jararaca_q",
97
- )
98
100
  @click.option(
99
101
  "--prefetch-count",
100
102
  type=int,
@@ -106,7 +108,6 @@ def worker(
106
108
  username: str | None,
107
109
  password: str | None,
108
110
  exchange: str,
109
- queue: str,
110
111
  prefetch_count: int,
111
112
  ) -> None:
112
113
 
@@ -134,30 +135,59 @@ def worker(
134
135
 
135
136
  url = parsed_url.geturl()
136
137
 
137
- config = AioPikaWorkerConfig(
138
+ config = worker_v1.AioPikaWorkerConfig(
138
139
  url=url,
139
140
  exchange=exchange,
140
- queue=queue,
141
141
  prefetch_count=prefetch_count,
142
142
  )
143
143
 
144
- MessageBusWorker(app, config=config).start_sync()
144
+ worker_v1.MessageBusWorker(app, config=config).start_sync()
145
+
146
+
147
@cli.command()
@click.argument(
    "app_path",
    type=str,
    envvar="APP_PATH",
)
@click.option(
    "--broker-url",
    type=str,
    envvar="BROKER_URL",
)
@click.option(
    "--backend-url",
    type=str,
    envvar="BACKEND_URL",
)
def worker_v2(app_path: str, broker_url: str, backend_url: str) -> None:
    # Start the v2 message-bus worker for the microservice at app_path.
    # Both URLs may come from the CLI flags or the BROKER_URL / BACKEND_URL
    # environment variables. Neither option is marked required, so a missing
    # value reaches the worker as None — presumably validated downstream
    # (TODO confirm).

    app = find_microservice_by_module_path(app_path)

    # Blocks until the worker is stopped.
    worker_v2_mod.MessageBusWorker(
        app=app,
        broker_url=broker_url,
        backend_url=backend_url,
    ).start_sync()
145
172
 
146
173
 
147
174
  @cli.command()
148
175
  @click.argument(
149
176
  "app_path",
150
177
  type=str,
178
+ envvar="APP_PATH",
151
179
  )
152
180
  @click.option(
153
181
  "--host",
154
182
  type=str,
155
183
  default="0.0.0.0",
184
+ envvar="HOST",
156
185
  )
157
186
  @click.option(
158
187
  "--port",
159
188
  type=int,
160
189
  default=8000,
190
+ envvar="PORT",
161
191
  )
162
192
  def server(app_path: str, host: str, port: int) -> None:
163
193
 
@@ -180,9 +210,6 @@ def server(app_path: str, host: str, port: int) -> None:
180
210
  uvicorn.run(asgi_app, host=host, port=port)
181
211
 
182
212
 
183
- class NullBackend(SchedulerBackend): ...
184
-
185
-
186
213
  @cli.command()
187
214
  @click.argument(
188
215
  "app_path",
@@ -199,7 +226,7 @@ def scheduler(
199
226
  ) -> None:
200
227
  app = find_microservice_by_module_path(app_path)
201
228
 
202
- Scheduler(app, NullBackend(), SchedulerConfig(interval=interval)).run()
229
+ Scheduler(app, interval=interval).run()
203
230
 
204
231
 
205
232
@cli.command()
@click.argument(
    "app_path",
    type=str,
)
@click.option(
    "--interval",
    type=int,
    # Fix: the option declared both default=1 and required=True; a defaulted
    # option always has a value, so required=True was contradictory dead code.
    default=1,
)
@click.option(
    "--broker-url",
    type=str,
    required=True,
    # Env fallback for consistency with the worker commands.
    envvar="BROKER_URL",
)
@click.option(
    "--backend-url",
    type=str,
    required=True,
    envvar="BACKEND_URL",
)
def scheduler_v2(
    interval: int,
    broker_url: str,
    backend_url: str,
    app_path: str,
) -> None:
    """Run the v2 scheduler for the microservice at APP_PATH."""

    app = find_microservice_by_module_path(app_path)
    scheduler = SchedulerV2(
        app=app,
        interval=interval,
        backend_url=backend_url,
        broker_url=broker_url,
    )
    # Blocks until the scheduler is stopped.
    scheduler.run()
233
268
 
269
+
270
def generate_interfaces(app_path: str, file_path: str) -> None:
    """Render the microservice at *app_path* to TypeScript interfaces in *file_path*.

    Best-effort: any failure is reported to stderr instead of raised, so a
    watch loop calling this repeatedly keeps running.
    """
    try:
        app = find_microservice_by_module_path(app_path)
        content = write_microservice_to_typescript_interface(app)

        # Mode "w" already truncates the file, so the tell()/seek(0)/truncate()
        # sequence left over from the old pre-opened-stream API is unnecessary.
        with open(file_path, "w", encoding="utf-8") as file:
            file.write(content)

        print(
            f"Generated TypeScript interfaces at {time.strftime('%H:%M:%S')} at {str(Path(file_path).absolute())}"
        )
    except Exception as e:
        # Broad catch is deliberate: generation errors must not kill the caller.
        print(f"Error generating TypeScript interfaces: {e}", file=sys.stderr)
292
 
245
- print(f"Generated TypeScript interfaces at {time.strftime('%H:%M:%S')}")
246
- except Exception as e:
247
- print(f"Error generating TypeScript interfaces: {e}", file=sys.stderr)
293
+
294
+ @cli.command()
295
+ @click.argument(
296
+ "app_path",
297
+ type=str,
298
+ )
299
+ @click.argument(
300
+ "file_path",
301
+ type=click.Path(file_okay=True, dir_okay=False),
302
+ )
303
+ @click.option(
304
+ "--watch",
305
+ is_flag=True,
306
+ )
307
+ @click.option(
308
+ "--src-dir",
309
+ type=click.Path(exists=True, file_okay=False, dir_okay=True),
310
+ default="src",
311
+ )
312
+ def gen_tsi(app_path: str, file_path: str, watch: bool, src_dir: str) -> None:
313
+ """Generate TypeScript interfaces from a Python microservice."""
248
314
 
249
315
  # Initial generation
250
- generate_interfaces()
316
+ generate_interfaces(app_path, file_path)
251
317
 
252
318
  # If watch mode is not enabled, exit
253
319
  if not watch:
@@ -273,7 +339,33 @@ def gen_tsi(app_path: str, file_path: StreamWriter, watch: bool, src_dir: str) -
273
339
  )
274
340
  if not event.is_directory and src_path.endswith(".py"):
275
341
  print(f"File changed: {src_path}")
276
- generate_interfaces()
342
+ # Create a completely detached process to ensure classes are reloaded
343
+ process = multiprocessing.get_context("spawn").Process(
344
+ target=generate_interfaces,
345
+ args=(app_path, file_path),
346
+ daemon=False, # Non-daemon to ensure it completes
347
+ )
348
+ process.start()
349
+ # Don't join to keep it detached from main process
350
+
351
+ def _run_generator_in_separate_process(
352
+ self, app_path: str, file_path: str
353
+ ) -> None:
354
+ # Using Python executable to start a completely new process
355
+ # This ensures all modules are freshly imported
356
+ generate_interfaces(app_path, file_path)
357
+ # cmd = [
358
+ # sys.executable,
359
+ # "-c",
360
+ # (
361
+ # f"import sys; sys.path.extend({sys.path}); "
362
+ # f"from jararaca.cli import generate_interfaces; "
363
+ # f"generate_interfaces('{app_path}', '{file_path}')"
364
+ # ),
365
+ # ]
366
+ # import subprocess
367
+
368
+ # subprocess.run(cmd, check=False)
277
369
 
278
370
  # Set up observer
279
371
  observer = Observer()
@@ -1,3 +1,3 @@
1
- from .types import MessageOf
1
+ from .message import MessageOf
2
2
 
3
3
  __all__ = ["MessageOf"]
File without changes