zrb 0.0.38__py3-none-any.whl → 0.0.40__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zrb/builtin/__init__.py +2 -0
- zrb/builtin/generator/docker_compose_task/template/_automate/snake_task_name.py +4 -1
- zrb/builtin/generator/fastapp/__init__.py +0 -0
- zrb/builtin/generator/fastapp/add.py +131 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/app/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/app/app_env.py +16 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/pulumi-destroy.sh +2 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/pulumi-up.sh +2 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/start.sh +15 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/common.py +81 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/compose_checker.py +49 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/compose_env.py +52 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/container.py +91 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/deployment.py +55 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/image.py +44 -0
- zrb/builtin/generator/fastapp/template/_automate/snake_app_name/local.py +92 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/.gitignore +2 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/Pulumi.yaml +6 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/__main__.py +79 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/requirements.txt +3 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/state/.gitkeep +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/docker-compose.yml +82 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.dockerignore +3 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.gitignore +3 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/Dockerfile +5 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app.py +51 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app_state.py +26 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/log.py +20 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_consumer.py +42 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_mocker.py +6 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_publisher.py +42 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_serializer.py +9 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/config.py +43 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/consumer.py +198 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/publisher.py +144 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/messagebus.py +52 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/mock.py +51 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/consumer.py +92 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/publisher.py +61 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/serializer/serializer.py +35 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/__init__.py +0 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/async_task.py +17 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/conversion.py +27 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/main.py +38 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/requirements.txt +8 -0
- zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/template.env +17 -0
- zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/cmd/start.sh +3 -1
- zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/container.py +1 -0
- zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/local.py +1 -0
- zrb/builtin/generator/simple_python_app/template/src/kebab-app-name/src/template.env +1 -0
- zrb/helper/list/ensure_uniqueness.py +22 -0
- zrb/helper/render_data.py +3 -2
- zrb/helper/string/conversion.py +8 -8
- zrb/helper/util.py +5 -0
- zrb/task/base_model.py +35 -14
- zrb/task/base_task.py +30 -20
- zrb/task/docker_compose_task.py +3 -3
- zrb/task/http_checker.py +29 -5
- zrb/task/port_checker.py +35 -10
- {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/METADATA +1 -1
- {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/RECORD +72 -20
- zrb/helper/list/append_unique.py +0 -9
- {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/LICENSE +0 -0
- {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/WHEEL +0 -0
- {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/entry_points.txt +0 -0
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/publisher.py
ADDED
@@ -0,0 +1,144 @@
|
|
1
|
+
from typing import Any, Optional
|
2
|
+
from core.messagebus.messagebus import (
|
3
|
+
Publisher, MessageSerializer, get_message_serializer
|
4
|
+
)
|
5
|
+
from aiokafka import AIOKafkaProducer
|
6
|
+
from aiokafka.producer.producer import _missing, DefaultPartitioner
|
7
|
+
|
8
|
+
import logging
|
9
|
+
|
10
|
+
|
11
|
+
class KafkaPublishConnection():
    """Async context manager owning the lifecycle of one AIOKafkaProducer.

    Every constructor argument mirrors an ``AIOKafkaProducer`` option and is
    stored verbatim; the producer itself is only created and started when the
    connection is entered (``async with``), and stopped again on exit.
    """

    def __init__(
        self,
        logger: logging.Logger,
        bootstrap_servers: str = 'localhost',
        client_id: Optional[Any] = None,
        metadata_max_age_ms=300000,
        request_timeout_ms=40000,
        api_version='auto',
        acks=_missing,
        key_serializer=None,
        value_serializer=None,
        compression_type=None,
        max_batch_size=16384,
        partitioner=DefaultPartitioner(),
        max_request_size=1048576,
        linger_ms=0,
        send_backoff_ms=100,
        retry_backoff_ms=100,
        security_protocol="PLAINTEXT",
        ssl_context=None,
        connections_max_idle_ms=540000,
        enable_idempotence=False,
        transactional_id=None,
        transaction_timeout_ms=60000,
        sasl_mechanism="PLAIN",
        sasl_plain_password=None,
        sasl_plain_username=None,
        sasl_kerberos_service_name='kafka',
        sasl_kerberos_domain_name=None,
        sasl_oauth_token_provider=None
    ):
        self.logger = logger
        # Producer is created lazily in __aenter__, not here.
        self.producer: Optional[AIOKafkaProducer] = None
        self.bootstrap_servers = bootstrap_servers
        self.client_id = client_id
        self.metadata_max_age_ms = metadata_max_age_ms
        self.request_timeout_ms = request_timeout_ms
        self.api_version = api_version
        self.acks = acks
        self.key_serializer = key_serializer
        self.value_serializer = value_serializer
        self.compression_type = compression_type
        self.max_batch_size = max_batch_size
        self.partitioner = partitioner
        self.max_request_size = max_request_size
        self.linger_ms = linger_ms
        self.send_backoff_ms = send_backoff_ms
        self.retry_backoff_ms = retry_backoff_ms
        self.security_protocol = security_protocol
        self.ssl_context = ssl_context
        self.connections_max_idle_ms = connections_max_idle_ms
        self.enable_idempotence = enable_idempotence
        self.transactional_id = transactional_id
        self.transaction_timeout_ms = transaction_timeout_ms
        self.sasl_mechanism = sasl_mechanism
        self.sasl_plain_password = sasl_plain_password
        self.sasl_plain_username = sasl_plain_username
        self.sasl_kerberos_service_name = sasl_kerberos_service_name
        self.sasl_kerberos_domain_name = sasl_kerberos_domain_name
        self.sasl_oauth_token_provider = sasl_oauth_token_provider

    async def __aenter__(self):
        """Create and start the producer; return self so the caller can
        reach ``.producer``."""
        self.logger.info('🐼 Create kafka producer')
        # All stored options are forwarded unchanged to aiokafka.
        # NOTE(review): 'send_backoff_ms' is forwarded as-is — confirm the
        # installed aiokafka version accepts this keyword.
        self.producer = AIOKafkaProducer(
            bootstrap_servers=self.bootstrap_servers,
            client_id=self.client_id,
            metadata_max_age_ms=self.metadata_max_age_ms,
            request_timeout_ms=self.request_timeout_ms,
            api_version=self.api_version,
            acks=self.acks,
            key_serializer=self.key_serializer,
            value_serializer=self.value_serializer,
            compression_type=self.compression_type,
            max_batch_size=self.max_batch_size,
            partitioner=self.partitioner,
            max_request_size=self.max_request_size,
            linger_ms=self.linger_ms,
            send_backoff_ms=self.send_backoff_ms,
            retry_backoff_ms=self.retry_backoff_ms,
            security_protocol=self.security_protocol,
            ssl_context=self.ssl_context,
            connections_max_idle_ms=self.connections_max_idle_ms,
            enable_idempotence=self.enable_idempotence,
            transactional_id=self.transactional_id,
            transaction_timeout_ms=self.transaction_timeout_ms,
            sasl_mechanism=self.sasl_mechanism,
            sasl_plain_password=self.sasl_plain_password,
            sasl_plain_username=self.sasl_plain_username,
            sasl_kerberos_service_name=self.sasl_kerberos_service_name,
            sasl_kerberos_domain_name=self.sasl_kerberos_domain_name,
            sasl_oauth_token_provider=self.sasl_oauth_token_provider,
        )
        self.logger.info('🐼 Start kafka producer')
        await self.producer.start()
        self.logger.info('🐼 Kafka producer started')
        return self

    async def __aexit__(self, exc_type, exc, tb):
        """Stop the producer regardless of whether the body raised."""
        self.logger.info('🐼 Stop kafka producer')
        await self.producer.stop()
        self.logger.info('🐼 Kafka producer stopped')
|
113
|
+
|
114
|
+
|
115
|
+
class KafkaPublisher(Publisher):
    """Publisher that opens a Kafka producer per publish call and retries.

    Each ``publish`` enters ``publish_connection`` (creating and starting a
    producer), sends one message, and tears the producer down again.  On any
    failure the whole cycle is retried up to ``retry`` more times before the
    last exception is re-raised.
    """

    def __init__(
        self,
        logger: logging.Logger,
        publish_connection: KafkaPublishConnection,
        serializer: Optional[MessageSerializer] = None,
        retry: int = 3
    ):
        self.logger = logger
        # Falls back to the default MessageSerializer when none is given.
        self.serializer = get_message_serializer(serializer)
        self.conn = publish_connection
        self._retry = retry

    async def publish(self, event_name: str, message: Any):
        """Encode *message* and send it to the *event_name* topic."""
        return await self._publish(event_name, message, self._retry)

    async def _publish(self, event_name: str, message: Any, retry: int):
        # Retries recursively; *retry* counts the attempts still allowed.
        try:
            async with self.conn as conn:
                producer: AIOKafkaProducer = conn.producer
                encoded_value = self.serializer.encode(event_name, message)
                self.logger.info(
                    f'🐼 Publish "{event_name}": {message}'
                )
                await producer.send_and_wait(event_name, encoded_value)
        except Exception:
            if retry == 0:
                raise
            # Bug fix: propagate the retried attempt's result to the caller
            # (the original dropped it by not returning here).  The dead
            # local reset `retry = self._retry` after a successful send was
            # also removed.
            return await self._publish(event_name, message, retry - 1)
|
@@ -0,0 +1,52 @@
|
|
1
|
+
from typing import Any, Callable, Mapping, Optional
|
2
|
+
from abc import ABC, abstractmethod
|
3
|
+
from core.serializer.serializer import Serializer, JsonSerializer
|
4
|
+
|
5
|
+
# Signature shared by every message handler: one payload in, anything out.
THandler = Callable[[Any], Any]


class Publisher(ABC):
    """Interface for anything able to emit an event with a payload."""

    @abstractmethod
    def publish(self, event_name: str, message: Any):
        """Send *message* under *event_name*."""


class Consumer(ABC):
    """Interface for anything able to receive events and dispatch handlers."""

    @abstractmethod
    def register(self, event_name: str) -> Callable[[THandler], Any]:
        """Return a decorator binding a handler to *event_name*."""

    @abstractmethod
    def run(self):
        """Start consuming messages."""
|
22
|
+
|
23
|
+
|
24
|
+
class MessageSerializer():
    """Routes encode/decode to a per-event Serializer, JSON by default."""

    def __init__(
        self,
        serializers: Optional[Mapping[str, Serializer]] = None
    ):
        # Per-event overrides; events without an entry use the JSON default.
        if serializers is None:
            serializers = {}
        self.serializers: Mapping[str, Serializer] = serializers
        self.default_serializer = JsonSerializer()

    def encode(self, event_name: str, message: Any) -> Any:
        """Encode *message* with the serializer registered for *event_name*."""
        return self._get_serializer(event_name).encode(message)

    def decode(self, event_name: str, encoded_message: Any) -> Any:
        """Decode *encoded_message* with the serializer for *event_name*."""
        return self._get_serializer(event_name).decode(encoded_message)

    def _get_serializer(self, event_name: str) -> Serializer:
        # Unregistered events fall back to the JSON serializer.
        return self.serializers.get(event_name, self.default_serializer)
|
45
|
+
|
46
|
+
|
47
|
+
def get_message_serializer(
    serializer: Optional[MessageSerializer] = None
) -> MessageSerializer:
    """Return *serializer* unchanged, or a fresh default when None."""
    return MessageSerializer() if serializer is None else serializer
|
@@ -0,0 +1,51 @@
|
|
1
|
+
from typing import Any, Callable, Mapping
|
2
|
+
from core.messagebus.messagebus import (
|
3
|
+
Publisher, Consumer, MessageSerializer, THandler
|
4
|
+
)
|
5
|
+
import asyncio
|
6
|
+
import inspect
|
7
|
+
import logging
|
8
|
+
|
9
|
+
|
10
|
+
class MockConsumer(Consumer):
    """In-process consumer for tests: no broker, handlers invoked directly."""

    def __init__(
        self, logger: logging.Logger, serializer: MessageSerializer
    ):
        self.logger = logger
        self.serializer = serializer
        # event name -> handler registered via register()
        self._handlers: Mapping[str, THandler] = {}

    def register(self, event_name: str) -> Callable[[THandler], Any]:
        """Return a decorator binding the decorated handler to *event_name*."""
        def _bind(handler: THandler):
            self.logger.warning(f'🪵 Register handler for "{event_name}"')
            self._handlers[event_name] = handler
            return handler
        return _bind

    async def handle(self, event_name: str, encoded_value: Any):
        """Decode *encoded_value* and invoke the handler for *event_name*.

        Raises KeyError when no handler was registered for the event.
        """
        message_handler = self._handlers[event_name]
        decoded_value = self.serializer.decode(event_name, encoded_value)
        self.logger.info(f'🪵 Consume "{event_name}": {decoded_value}')
        # Coroutine handlers run as background tasks; sync handlers inline.
        if inspect.iscoroutinefunction(message_handler):
            return asyncio.create_task(message_handler(decoded_value))
        return message_handler(decoded_value)

    async def run(self):
        """Nothing to start for the mock consumer."""
        return
|
35
|
+
|
36
|
+
|
37
|
+
class MockPublisher(Publisher):
    """In-process publisher that hands messages straight to a MockConsumer."""

    def __init__(
        self,
        logger: logging.Logger,
        consumer: MockConsumer,
        serializer: MessageSerializer
    ):
        self.logger = logger
        self.consumer = consumer
        self.serializer = serializer

    async def publish(self, event_name: str, message: Any):
        """Encode *message* and dispatch it directly to the paired consumer."""
        encoded_value = self.serializer.encode(event_name, message)
        self.logger.info(f'🪵 Publish "{event_name}": {message}')
        await self.consumer.handle(event_name, encoded_value)
|
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/__init__.py
ADDED
File without changes
|
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/consumer.py
ADDED
@@ -0,0 +1,92 @@
|
|
1
|
+
from typing import Any, Callable, Mapping, Optional
|
2
|
+
from core.messagebus.messagebus import (
|
3
|
+
Consumer, THandler, MessageSerializer, get_message_serializer
|
4
|
+
)
|
5
|
+
import asyncio
|
6
|
+
import aiormq
|
7
|
+
import inspect
|
8
|
+
import logging
|
9
|
+
|
10
|
+
|
11
|
+
class RMQConsumeConnection():
    """Async context manager around a single aiormq consumer connection."""

    def __init__(self, logger: logging.Logger, connection_string: str):
        self.logger = logger
        # AMQP URL, e.g. amqp://user:pass@host/
        self.connection_string = connection_string
        # Opened in __aenter__, closed in __aexit__.
        self.connection: Optional[aiormq.Connection] = None

    async def __aenter__(self):
        """Open the AMQP connection and return self (see ``.connection``)."""
        self.logger.info('🐰 Create consumer connection')
        self.connection = await aiormq.connect(self.connection_string)
        self.logger.info('🐰 Consumer connection created')
        return self

    async def __aexit__(self, exc_type, exc, tb):
        """Close the connection regardless of whether the body raised."""
        self.logger.info('🐰 Close consumer connection')
        await self.connection.close()
        self.logger.info('🐰 Consumer connection closed')
|
27
|
+
|
28
|
+
|
29
|
+
class RMQConsumer(Consumer):
    """RabbitMQ consumer: one declared queue per registered event name."""

    def __init__(
        self,
        logger: logging.Logger,
        consume_connection: RMQConsumeConnection,
        serializer: Optional[MessageSerializer] = None,
        retry: int = 5
    ):
        self.logger = logger
        self.conn = consume_connection
        # event name -> handler; populated via register().
        self._handlers: Mapping[str, THandler] = {}
        self.serializer = get_message_serializer(serializer)
        self._retry = retry

    def register(self, event_name: str) -> Callable[[THandler], Any]:
        """Return a decorator binding the decorated handler to *event_name*."""
        def wrapper(handler: THandler):
            self.logger.warning(f'🐰 Register handler for "{event_name}"')
            self._handlers[event_name] = handler
            return handler
        return wrapper

    async def run(self):
        """Declare one queue per registered event and start consuming."""
        return await self._run(self._retry)

    async def _run(self, retry: int):
        # NOTE(review): the `async with` exits right after the consume tasks
        # are scheduled, which closes the connection — confirm the intended
        # connection lifetime versus the background basic_consume tasks.
        try:
            async with self.conn as conn:
                connection: aiormq.Connection = conn.connection
                self.logger.info('🐰 Get channel')
                channel = await connection.channel()
                # Only the keys are needed here; handlers are looked up again
                # inside the per-queue callback.
                for event_name in self._handlers:
                    self.logger.info(f'🐰 Declare queue: {event_name}')
                    await channel.queue_declare(event_name)
                    on_message = self._create_consumer_callback(
                        channel, event_name
                    )
                    asyncio.create_task(channel.basic_consume(
                        queue=event_name, consumer_callback=on_message
                    ))
        except Exception:
            if retry == 0:
                raise
            # Bug fix: return the retried attempt's result (the original
            # dropped it).  The dead `retry = self._retry` at the end of the
            # try body was removed.
            return await self._run(retry - 1)

    def _create_consumer_callback(
        self,
        channel: aiormq.Channel,
        event_name: str,
    ) -> Callable[[Any], Any]:
        # Each queue gets its own callback that decodes, dispatches, and acks.
        async def on_message(message):
            decoded_value = self.serializer.decode(event_name, message.body)
            handler = self._handlers.get(event_name)
            self.logger.info(f'🐰 Consume "{event_name}": {decoded_value}')
            await self._run_handler(handler, decoded_value)
            # Ack only after the handler has been dispatched.
            await channel.basic_ack(message.delivery_tag)
        return on_message

    async def _run_handler(
        self, message_handler: THandler, decoded_value: Any
    ):
        # Coroutine handlers run as background tasks; sync handlers inline.
        if inspect.iscoroutinefunction(message_handler):
            return asyncio.create_task(message_handler(decoded_value))
        return message_handler(decoded_value)
|
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/publisher.py
ADDED
@@ -0,0 +1,61 @@
|
|
1
|
+
from typing import Any, Optional
|
2
|
+
from core.messagebus.messagebus import (
|
3
|
+
Publisher, MessageSerializer, get_message_serializer
|
4
|
+
)
|
5
|
+
import aiormq
|
6
|
+
import logging
|
7
|
+
|
8
|
+
|
9
|
+
class RMQPublishConnection():
    """Async context manager around a single aiormq publisher connection."""

    def __init__(self, logger: logging.Logger, connection_string: str):
        self.logger = logger
        # AMQP URL, e.g. amqp://user:pass@host/
        self.connection_string = connection_string
        # Opened in __aenter__, closed in __aexit__.
        self.connection: Optional[aiormq.Connection] = None

    async def __aenter__(self):
        """Open the AMQP connection and return self (see ``.connection``)."""
        self.logger.info('🐰 Create publisher connection')
        self.connection = await aiormq.connect(self.connection_string)
        self.logger.info('🐰 Publisher connection created')
        return self

    async def __aexit__(self, exc_type, exc, tb):
        """Close the connection regardless of whether the body raised."""
        self.logger.info('🐰 Close publisher connection')
        await self.connection.close()
        self.logger.info('🐰 Publisher connection closed')
|
25
|
+
|
26
|
+
|
27
|
+
class RMQPublisher(Publisher):
    """RabbitMQ publisher: one connection and channel per publish, retried.

    Each ``publish`` opens the connection, declares the queue, publishes one
    message, and closes the connection again; failures retry the whole cycle
    up to ``retry`` more times before re-raising.
    """

    def __init__(
        self,
        logger: logging.Logger,
        publish_connection: RMQPublishConnection,
        serializer: Optional[MessageSerializer] = None,
        retry: int = 5
    ):
        self.logger = logger
        # Falls back to the default MessageSerializer when none is given.
        self.serializer = get_message_serializer(serializer)
        self.conn = publish_connection
        self._retry = retry

    async def publish(self, event_name: str, message: Any):
        """Encode *message* and publish it to the *event_name* queue."""
        return await self._publish(event_name, message, self._retry)

    async def _publish(self, event_name: str, message: Any, retry: int):
        # Retries recursively; *retry* counts the attempts still allowed.
        try:
            async with self.conn as conn:
                connection: aiormq.Connection = conn.connection
                self.logger.info('🐰 Get channel')
                channel = await connection.channel()
                self.logger.info(f'🐰 Declare queue: {event_name}')
                await channel.queue_declare(event_name)
                self.logger.info(f'🐰 Publish "{event_name}": {message}')
                await channel.basic_publish(
                    body=self.serializer.encode(event_name, message),
                    routing_key=event_name,
                )
        except Exception:
            if retry == 0:
                raise
            # Bug fix: propagate the retried attempt's result (the original
            # dropped it by not returning).  The dead `retry = self._retry`
            # at the end of the try body was removed.
            return await self._publish(event_name, message, retry - 1)
|
61
|
+
|
@@ -0,0 +1,35 @@
|
|
1
|
+
from typing import Any, Callable
|
2
|
+
from abc import ABC, abstractmethod
|
3
|
+
import jsons
|
4
|
+
|
5
|
+
|
6
|
+
class Serializer(ABC):
    """Two-way codec: encode to a wire representation and decode back."""

    @abstractmethod
    def encode(self, message: Any) -> Any:
        """Turn *message* into its wire representation."""

    @abstractmethod
    def decode(self, message: Any) -> Any:
        """Turn a wire representation back into a message."""


class CustomSerializer(Serializer):
    """Serializer assembled from a user-supplied encoder/decoder pair."""

    def __init__(
        self, encoder: Callable[[Any], Any], decoder: Callable[[Any], Any]
    ):
        self.encoder = encoder
        self.decoder = decoder

    def encode(self, message: Any) -> Any:
        # Delegate straight to the injected callables.
        return self.encoder(message)

    def decode(self, encoded_message: Any) -> Any:
        return self.decoder(encoded_message)


class JsonSerializer(Serializer):
    """JSON codec: encodes to UTF-8 bytes and decodes from them."""

    def encode(self, message: Any) -> Any:
        return jsons.dumps(message).encode()

    def decode(self, encoded_message: Any) -> Any:
        return jsons.loads(encoded_message.decode())
|
File without changes
|
@@ -0,0 +1,17 @@
|
|
1
|
+
from typing import Awaitable, Callable
|
2
|
+
import asyncio
|
3
|
+
import inspect
|
4
|
+
|
5
|
+
|
6
|
+
def create_task(
    awaitable: Awaitable,
    on_error: Callable
) -> asyncio.Task:
    """Schedule *awaitable* as a Task whose failures route to *on_error*.

    Any exception raised by *awaitable* is passed to *on_error*, which may be
    a plain function or a coroutine function; its result becomes the task's
    result.
    """
    async def _guarded(aw):
        try:
            return await aw
        except Exception as exc:
            # Support both sync and async error handlers.
            if inspect.iscoroutinefunction(on_error):
                return await on_error(exc)
            return on_error(exc)
    return asyncio.create_task(_guarded(awaitable))
|
@@ -0,0 +1,27 @@
|
|
1
|
+
import logging
|
2
|
+
|
3
|
+
# Lowercase level name -> logging module constant.
LOGGING_LEVEL_MAP = {
    'critical': logging.CRITICAL,
    'fatal': logging.FATAL,
    'error': logging.ERROR,
    'warning': logging.WARNING,
    'warn': logging.WARN,
    'info': logging.INFO,
    'debug': logging.DEBUG,
    'notset': logging.NOTSET,
}


def str_to_boolean(value: str) -> bool:
    """Parse common true/false spellings (case-insensitive).

    Raises ValueError for anything unrecognized.  (Originally raised a bare
    Exception; ValueError is more precise and still caught by existing
    ``except Exception`` callers.)
    """
    normalized = value.lower()  # lower() hoisted: computed once, not twice
    if normalized in ('0', 'false', 'no', 'n'):
        return False
    if normalized in ('1', 'true', 'yes', 'y'):
        return True
    raise ValueError(f'Cannot convert to boolean: "{value}"')


def str_to_logging_level(logging_level_str: str) -> int:
    """Map a level name to its logging constant; unknown names get WARNING."""
    return LOGGING_LEVEL_MAP.get(logging_level_str.lower(), logging.WARNING)
|
@@ -0,0 +1,38 @@
|
|
1
|
+
from component.app import app
from component.app_state import app_state
from component.log import logger
from component.message_consumer import consumer
from component.message_publisher import publisher
from helper.async_task import create_task


# The process is considered alive as soon as the module loads;
# readiness is only set once startup has scheduled the consumer.
app_state.set_liveness(True)
# In-memory record of every consumed message (demo/debugging only).
messages = []


async def on_error(exception: Exception):
    # A crashed consumer flips readiness off so orchestrators stop routing.
    logger.critical(exception)
    app_state.set_readiness(False)


@app.on_event('startup')
async def startup_event():
    # Run the message consumer in the background; become ready afterwards.
    logger.info('Started')
    create_task(consumer.run(), on_error=on_error)
    app_state.set_readiness(True)


@consumer.register('coba')
async def handle_event(message):
    # Demo handler: remember and echo everything published to 'coba'.
    messages.append(message)
    print(messages)


@app.get('/')
def handle_get():
    # Simple liveness-style hello endpoint.
    return ('hello world')


@app.get('/send')
async def handle_send():
    # Demo endpoint publishing a sample message to the 'coba' event.
    return await publisher.publish('coba', 'sesuatu')
|
@@ -0,0 +1,17 @@
|
|
1
|
+
APP_NAME=fastapp
|
2
|
+
APP_LOGGING_LEVEL=info
|
3
|
+
APP_HOST=0.0.0.0
|
4
|
+
APP_PORT=8080
|
5
|
+
APP_RELOAD=true
|
6
|
+
|
7
|
+
APP_BROKER_TYPE=rabbitmq
|
8
|
+
APP_RMQ_CONNECTION=amqp://guest:guest@localhost/
|
9
|
+
APP_KAFKA_BOOTSTRAP_SERVERS=localhost:9092
|
10
|
+
|
11
|
+
APP_CORS_ALLOW_ORIGINS='["*"]'
|
12
|
+
APP_CORS_ALLOW_ORIGIN_REGEX=''
|
13
|
+
APP_CORS_ALLOW_METHODS='["*"]'
|
14
|
+
APP_CORS_ALLOW_HEADERS='["*"]'
|
15
|
+
APP_CORS_ALLOW_CREDENTIALS=0
|
16
|
+
APP_CORS_EXPOSE_HEADERS=0
|
17
|
+
APP_CORS_MAX_AGE=600
|
@@ -5,9 +5,11 @@ then
|
|
5
5
|
echo "Init venv"
|
6
6
|
python -m venv venv
|
7
7
|
fi
|
8
|
+
echo "Activate venv"
|
9
|
+
source venv/bin/activate
|
8
10
|
|
9
11
|
echo "Install packages"
|
10
12
|
pip install -r requirements.txt
|
11
13
|
|
12
14
|
echo "Start app"
|
13
|
-
|
15
|
+
uvicorn main:app --host {{env.get("APP_HOST", "0.0.0.0")}} --port {{env.get("APP_PORT", "8080")}}
|
@@ -31,6 +31,7 @@ start_snake_app_name = CmdTask(
|
|
31
31
|
cmd_path=os.path.join(CURRENT_DIR, 'cmd', 'start.sh'),
|
32
32
|
checkers=[
|
33
33
|
HTTPChecker(
|
34
|
+
name='check-kebab-app-name',
|
34
35
|
host='{{input.snake_app_name_host}}',
|
35
36
|
port='{{env.APP_PORT}}',
|
36
37
|
is_https='{{input.snake_app_name_https}}'
|
@@ -0,0 +1,22 @@
|
|
1
|
+
from typing import Any, Callable, List, Optional
|
2
|
+
|
3
|
+
# Optional custom "are these two equal?" predicate.
TComparator = Callable[[Any, Any], bool]


def ensure_uniqueness(
    data: List[Any], comparator: Optional[TComparator] = None
) -> List[Any]:
    """Return *data* without duplicates, preserving first-seen order.

    Plain equality (``==``) always deduplicates; when *comparator* is given,
    an item is additionally dropped if the comparator matches it against any
    already-kept item.
    """
    unique: List[Any] = []
    for item in data:
        if item in unique:
            continue
        if comparator is not None and any(
            comparator(kept, item) for kept in unique
        ):
            continue
        unique.append(item)
    return unique
|
zrb/helper/render_data.py
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
from .util import (
|
2
2
|
coalesce, coalesce_str, to_camel_case, to_pascal_case, to_kebab_case,
|
3
|
-
to_snake_case, to_human_readable
|
3
|
+
to_snake_case, to_human_readable, to_boolean
|
4
4
|
)
|
5
5
|
import datetime
|
6
6
|
import os
|
@@ -19,6 +19,7 @@ DEFAULT_RENDER_DATA = {
|
|
19
19
|
'to_pascal_case': to_pascal_case,
|
20
20
|
'to_kebab_case': to_kebab_case,
|
21
21
|
'to_snake_case': to_snake_case,
|
22
|
-
'to_human_readable': to_human_readable
|
22
|
+
'to_human_readable': to_human_readable,
|
23
|
+
'to_boolean': to_boolean,
|
23
24
|
}
|
24
25
|
}
|
zrb/helper/string/conversion.py
CHANGED
@@ -1,9 +1,9 @@
|
|
1
|
+
import keyword
|
1
2
|
import logging
|
2
3
|
import re
|
3
4
|
|
4
5
|
NON_WORD = re.compile(r'[\W]+')
|
5
|
-
|
6
|
-
LEADING_NUM = re.compile(r'^[0-9]+')
|
6
|
+
LEADING_NUM = re.compile(r'^\d+')
|
7
7
|
LOGGING_LEVEL_MAP = {
|
8
8
|
'critical': logging.CRITICAL,
|
9
9
|
'fatal': logging.FATAL,
|
@@ -21,14 +21,14 @@ def to_cmd_name(name: str) -> str:
|
|
21
21
|
|
22
22
|
|
23
23
|
def to_variable_name(string: str) -> str:
|
24
|
-
#
|
25
|
-
string =
|
26
|
-
# Convert to lowercase
|
27
|
-
string = string.lower()
|
28
|
-
# Replace spaces with underscores
|
29
|
-
string = string.replace(' ', '_')
|
24
|
+
# Replace any non-word characters with underscore
|
25
|
+
string = NON_WORD.sub('_', string).strip()
|
30
26
|
# Remove leading digits
|
31
27
|
string = LEADING_NUM.sub('', string)
|
28
|
+
# Convert to lowercase
|
29
|
+
string = string.lower()
|
30
|
+
if keyword.iskeyword(string):
|
31
|
+
return string + '_'
|
32
32
|
return string
|
33
33
|
|
34
34
|
|
zrb/helper/util.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1
1
|
from typing import Any, Optional
|
2
|
+
from .string.conversion import to_boolean as conversion_to_boolean
|
2
3
|
import re
|
3
4
|
import jinja2
|
4
5
|
|
@@ -87,3 +88,7 @@ def to_human_readable(text: Optional[str]) -> str:
|
|
87
88
|
if new_part != '':
|
88
89
|
new_parts.append(new_part)
|
89
90
|
return ' '.join(new_parts).strip(' ')
|
91
|
+
|
92
|
+
|
93
|
+
def to_boolean(text: str) -> bool:
    """Convert *text* to a boolean via the shared string-conversion helper."""
    return conversion_to_boolean(text)
|