zrb 0.0.38__py3-none-any.whl → 0.0.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. zrb/builtin/__init__.py +2 -0
  2. zrb/builtin/generator/docker_compose_task/template/_automate/snake_task_name.py +4 -1
  3. zrb/builtin/generator/fastapp/__init__.py +0 -0
  4. zrb/builtin/generator/fastapp/add.py +131 -0
  5. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/__init__.py +0 -0
  6. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/app/__init__.py +0 -0
  7. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/app/app_env.py +16 -0
  8. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/pulumi-destroy.sh +2 -0
  9. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/pulumi-up.sh +2 -0
  10. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/cmd/start.sh +15 -0
  11. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/common.py +81 -0
  12. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/__init__.py +0 -0
  13. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/compose_checker.py +49 -0
  14. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/compose/compose_env.py +52 -0
  15. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/container.py +91 -0
  16. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/deployment.py +55 -0
  17. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/image.py +44 -0
  18. zrb/builtin/generator/fastapp/template/_automate/snake_app_name/local.py +92 -0
  19. zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/.gitignore +2 -0
  20. zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/Pulumi.yaml +6 -0
  21. zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/__main__.py +79 -0
  22. zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/requirements.txt +3 -0
  23. zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/state/.gitkeep +0 -0
  24. zrb/builtin/generator/fastapp/template/src/kebab-app-name/docker-compose.yml +82 -0
  25. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.dockerignore +3 -0
  26. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.gitignore +3 -0
  27. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/Dockerfile +5 -0
  28. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/__init__.py +0 -0
  29. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/__init__.py +0 -0
  30. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app.py +51 -0
  31. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app_state.py +26 -0
  32. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/log.py +20 -0
  33. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_consumer.py +42 -0
  34. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_mocker.py +6 -0
  35. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_publisher.py +42 -0
  36. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_serializer.py +9 -0
  37. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/config.py +43 -0
  38. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/__init__.py +0 -0
  39. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/__init__.py +0 -0
  40. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/__init__.py +0 -0
  41. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/consumer.py +198 -0
  42. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/publisher.py +144 -0
  43. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/messagebus.py +52 -0
  44. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/mock.py +51 -0
  45. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/__init__.py +0 -0
  46. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/consumer.py +92 -0
  47. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/rabbitmq/publisher.py +61 -0
  48. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/serializer/serializer.py +35 -0
  49. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/__init__.py +0 -0
  50. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/async_task.py +17 -0
  51. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/helper/conversion.py +27 -0
  52. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/main.py +38 -0
  53. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/requirements.txt +8 -0
  54. zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/template.env +17 -0
  55. zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/cmd/start.sh +3 -1
  56. zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/container.py +1 -0
  57. zrb/builtin/generator/simple_python_app/template/_automate/snake_app_name/local.py +1 -0
  58. zrb/builtin/generator/simple_python_app/template/src/kebab-app-name/src/template.env +1 -0
  59. zrb/helper/list/ensure_uniqueness.py +22 -0
  60. zrb/helper/render_data.py +3 -2
  61. zrb/helper/string/conversion.py +8 -8
  62. zrb/helper/util.py +5 -0
  63. zrb/task/base_model.py +35 -14
  64. zrb/task/base_task.py +30 -20
  65. zrb/task/docker_compose_task.py +3 -3
  66. zrb/task/http_checker.py +29 -5
  67. zrb/task/port_checker.py +35 -10
  68. {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/METADATA +1 -1
  69. {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/RECORD +72 -20
  70. zrb/helper/list/append_unique.py +0 -9
  71. {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/LICENSE +0 -0
  72. {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/WHEEL +0 -0
  73. {zrb-0.0.38.dist-info → zrb-0.0.40.dist-info}/entry_points.txt +0 -0
zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/__main__.py
@@ -0,0 +1,79 @@
+ '''A Kubernetes Python Pulumi program to deploy kebab-app-name'''
+
+ from dotenv import dotenv_values
+ import pulumi
+ import pulumi_kubernetes as k8s
+ import os
+
+ CURRENT_DIR = os.path.dirname(__file__)
+ APP_DIR = os.path.abspath(os.path.join(CURRENT_DIR, '..', 'src'))
+ TEMPLATE_ENV_FILE_NAME = os.path.join(APP_DIR, 'template.env')
+
+ image = os.getenv('IMAGE', 'kebab-app-name:latest')
+ replica = int(os.getenv('REPLICA', '1'))
+ app_labels = {'app': 'kebab-app-name'}
+ env_map = dotenv_values(TEMPLATE_ENV_FILE_NAME)
+ app_port = int(os.getenv('APP_PORT', env_map.get('APP_PORT', '8080')))
+
+ # Pulumi deployment docs:
+ # https://www.pulumi.com/registry/packages/kubernetes/api-docs/apps/v1/deployment/
+ deployment = k8s.apps.v1.Deployment(
+     resource_name='kebab-app-name',
+     spec=k8s.apps.v1.DeploymentSpecArgs(
+         selector=k8s.meta.v1.LabelSelectorArgs(match_labels=app_labels),
+         replicas=replica,
+         template=k8s.core.v1.PodTemplateSpecArgs(
+             metadata=k8s.meta.v1.ObjectMetaArgs(labels=app_labels),
+             spec=k8s.core.v1.PodSpecArgs(
+                 containers=[
+                     k8s.core.v1.ContainerArgs(
+                         name='kebab-app-name',
+                         image=image,
+                         env=[
+                             k8s.core.v1.EnvVarArgs(
+                                 name=env_name,
+                                 value=os.getenv(env_name, default_value)
+                             )
+                             for env_name, default_value in env_map.items()
+                         ],
+                         ports=[
+                             k8s.core.v1.ContainerPortArgs(
+                                 container_port=app_port
+                             )
+                         ],
+                         liveness_probe=k8s.core.v1.ProbeArgs(
+                             http_get=k8s.core.v1.HTTPGetActionArgs(
+                                 port=app_port
+                             )
+                         ),
+                         readiness_probe=k8s.core.v1.ProbeArgs(
+                             http_get=k8s.core.v1.HTTPGetActionArgs(
+                                 port=app_port
+                             )
+                         ),
+                     )
+                 ]
+             )
+         )
+     )
+ )
+
+ # Pulumi services docs:
+ # https://www.pulumi.com/registry/packages/kubernetes/api-docs/core/v1/service/
+ service = k8s.core.v1.Service(
+     resource_name='kebab-app-name',
+     spec=k8s.core.v1.ServiceSpecArgs(
+         selector=app_labels,
+         ports=[
+             k8s.core.v1.ServicePortArgs(
+                 port=80,
+                 protocol="TCP",
+                 target_port=app_port,
+             )
+         ],
+         type='LoadBalancer',
+     )
+ )
+
+ pulumi.export('deployment-name', deployment.metadata['name'])
+ pulumi.export('service-name', service.metadata['name'])
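A note on the environment wiring above: values parsed from template.env act as defaults, and a matching variable set in the deployment environment takes precedence before being injected into the container spec. A minimal sketch of that precedence, with hypothetical values:

    import os
    from dotenv import dotenv_values

    env_map = dotenv_values('template.env')   # e.g. {'APP_PORT': '8080'}
    os.environ['APP_PORT'] = '9090'           # operator override
    value = os.getenv('APP_PORT', env_map.get('APP_PORT'))
    assert value == '9090'                    # the real environment wins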
zrb/builtin/generator/fastapp/template/src/kebab-app-name/deployment/requirements.txt
@@ -0,0 +1,3 @@
+ pulumi>=3.0.0,<4.0.0
+ pulumi-kubernetes>=3.0.0,<4.0.0
+ python-dotenv==0.21.1
zrb/builtin/generator/fastapp/template/src/kebab-app-name/docker-compose.yml
@@ -0,0 +1,82 @@
+ version: '3'
+ services:
+
+
+   redpanda:
+     image: docker.redpanda.com/redpandadata/redpanda:v22.3.11
+     hostname: redpanda
+     command:
+       - redpanda start
+       - --smp 1
+       - --reserve-memory 0M
+       - --overprovisioned
+       - --node-id 1
+       - --kafka-addr PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092
+       - --advertise-kafka-addr PLAINTEXT://redpanda:29092,OUTSIDE://localhost:9092
+       - --pandaproxy-addr PLAINTEXT://0.0.0.0:28082,OUTSIDE://0.0.0.0:8082
+       - --advertise-pandaproxy-addr PLAINTEXT://redpanda:28082,OUTSIDE://localhost:8082
+       - --rpc-addr 0.0.0.0:33145
+       - --advertise-rpc-addr redpanda:33145
+     ports:
+       - 8082:8082
+       - ${KAFKA_OUTSIDE_HOST_PORT:-9092}:9092
+       - 28082:28082
+       - ${KAFKA_PLAINTEXT_HOST_PORT:-29092}:29092
+     restart: on-failure
+     profiles:
+       - kafka
+     healthcheck:
+       test: ["CMD", "redpanda", "admin", "check"]
+       interval: 5s
+       timeout: 1s
+       retries: 30
+
+
+   redpanda-console:
+     image: docker.redpanda.com/redpandadata/console:v2.2.2
+     hostname: redpanda-console
+     ports:
+       - ${REDPANDA_CONSOLE_HOST_PORT:-9000}:8080
+     environment:
+       - KAFKA_BROKERS=redpanda:29092
+     profiles:
+       - kafka
+     depends_on:
+       - redpanda
+     restart: on-failure
+
+
+   rabbitmq:
+     image: rabbitmq:3-management
+     hostname: rabbitmq
+     ports:
+       - "${RABBITMQ_HOST_PORT:-5672}:5672"
+       - "${RABBITMQ_MANAGEMENT_HOST_PORT:-15672}:15672"
+     # volumes:
+     #   - rabbitmq:/var/lib/rabbitmq/mnesia
+     environment:
+       - RABBITMQ_DEFAULT_USER=guest
+       - RABBITMQ_DEFAULT_PASS=guest
+     restart: on-failure
+     profiles:
+       - rabbitmq
+     healthcheck:
+       test: ["CMD", "rabbitmqctl", "status"]
+       interval: 5s
+       timeout: 1s
+       retries: 30
+
+
+   snake_app_name:
+     build: src
+     image: ${IMAGE:-kebab-app-name}
+     container_name: snake_app_name
+     hostname: snake_app_name
+     env_file: 'src/template.env'
+     environment:
+       APP_PORT: ${APP_PORT:-8080}
+     ports:
+       - "${APP_HOST_PORT:-8080}:${APP_PORT:-8080}"
+     restart: on-failure
+     profiles:
+       - monolith
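Note that every service above is gated behind a Compose profile (kafka, rabbitmq, or monolith), so a plain `docker compose up` starts nothing; a profile has to be activated explicitly, e.g. `docker compose --profile kafka up -d` to bring up redpanda and redpanda-console.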
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.dockerignore
@@ -0,0 +1,3 @@
+ .env
+ __pycache__
+ venv
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/.gitignore
@@ -0,0 +1,3 @@
+ .env
+ __pycache__
+ venv
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/Dockerfile
@@ -0,0 +1,5 @@
+ FROM python:3.10-slim
+ WORKDIR /home
+ COPY . .
+ RUN pip install -r requirements.txt
+ CMD python main.py
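Note: `CMD python main.py` is the shell form, so the Python process runs as a child of `/bin/sh` and does not receive signals such as SIGTERM directly; the exec form `CMD ["python", "main.py"]` would avoid that, at the cost of shell variable expansion.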
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app.py
@@ -0,0 +1,51 @@
+ from fastapi import status
+ from fastapi.responses import JSONResponse
+ from fastapi.middleware.cors import CORSMiddleware
+ from fastapi import FastAPI
+ from config import (
+     cors_allow_credentials, cors_allow_headers, cors_allow_methods,
+     cors_allow_origin_regex, cors_allow_origins, cors_expose_headers,
+     cors_max_age
+ )
+ from component.app_state import app_state
+
+ app = FastAPI()
+
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=cors_allow_origins,
+     allow_origin_regex=cors_allow_origin_regex,
+     allow_methods=cors_allow_methods,
+     allow_headers=cors_allow_headers,
+     allow_credentials=cors_allow_credentials,
+     expose_headers=cors_expose_headers,
+     max_age=cors_max_age,
+ )
+
+
+ @app.head('/liveness')
+ @app.get('/liveness')
+ def handle_liveness():
+     if app_state.get_liveness():
+         return JSONResponse(
+             content={'message': 'Service is alive'},
+             status_code=status.HTTP_200_OK
+         )
+     return JSONResponse(
+         content={'message': 'Service is not alive'},
+         status_code=status.HTTP_503_SERVICE_UNAVAILABLE
+     )
+
+
+ @app.head('/readiness')
+ @app.get('/readiness')
+ def handle_readiness():
+     if app_state.get_readiness():
+         return JSONResponse(
+             content={'message': 'Service is ready'},
+             status_code=status.HTTP_200_OK
+         )
+     return JSONResponse(
+         content={'message': 'Service is not ready'},
+         status_code=status.HTTP_503_SERVICE_UNAVAILABLE
+     )
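The probe endpoints flip between 200 and 503 based on the shared AppState flags. A hypothetical smoke test, assuming the src directory is on PYTHONPATH and the app's requirements (plus httpx, which FastAPI's TestClient needs) are installed:

    from fastapi.testclient import TestClient
    from component.app import app
    from component.app_state import app_state

    client = TestClient(app)
    app_state.set_liveness(True)
    assert client.get('/liveness').status_code == 200   # Service is alive
    app_state.set_liveness(False)
    assert client.get('/liveness').status_code == 503   # Service is not alive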
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/app_state.py
@@ -0,0 +1,26 @@
+ class AppState():
+     __instance = None
+
+     def __new__(cls):
+         if cls.__instance is None:
+             cls.__instance = super().__new__(cls)
+         return cls.__instance
+
+     def __init__(self):
+         self.readiness: bool = False
+         self.liveness: bool = False
+
+     def set_liveness(self, value: bool):
+         self.liveness = value
+
+     def set_readiness(self, value: bool):
+         self.readiness = value
+
+     def get_liveness(self) -> bool:
+         return self.liveness
+
+     def get_readiness(self) -> bool:
+         return self.readiness
+
+
+ app_state = AppState()
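One caveat worth knowing about this singleton: `__new__` returns the shared instance, but Python still calls `__init__` on every `AppState()` invocation, which resets both flags to False. The template sidesteps this by having every module import the single `app_state` created here rather than constructing new instances:

    a = AppState()
    a.set_readiness(True)
    b = AppState()                     # same object...
    assert a is b
    assert a.get_readiness() is False  # ...but __init__ reset the flags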
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/log.py
@@ -0,0 +1,20 @@
+ from config import app_logging_level
+ import logging
+
+ # create logger
+ logger = logging.getLogger('src')
+ logger.setLevel(app_logging_level)
+
+ ch = logging.StreamHandler()
+ ch.setLevel(app_logging_level)
+
+ # create formatter
+ formatter = logging.Formatter(
+     '%(levelname)s:\t%(message)s'
+ )
+
+ # add formatter to ch
+ ch.setFormatter(formatter)
+
+ # add ch to logger
+ logger.addHandler(ch)
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_consumer.py
@@ -0,0 +1,42 @@
+ from config import (
+     app_name, app_broker_type, app_rmq_connection, app_kafka_bootstrap_servers
+ )
+ from core.messagebus.messagebus import Consumer, MessageSerializer
+ from core.messagebus.rabbitmq.consumer import RMQConsumeConnection, RMQConsumer
+ from core.messagebus.kafka.consumer import (
+     KafkaConsumeConnection, KafkaConsumer
+ )
+ from component.message_mocker import mock_consumer
+ from component.message_serializer import message_serializer
+ from component.log import logger
+
+
+ def init_consumer(
+     broker_type: str, serializer: MessageSerializer
+ ) -> Consumer:
+     if broker_type == 'rabbitmq':
+         consume_connection = RMQConsumeConnection(
+             logger=logger, connection_string=app_rmq_connection
+         )
+         return RMQConsumer(
+             logger=logger,
+             consume_connection=consume_connection,
+             serializer=serializer
+         )
+     if broker_type == 'kafka':
+         consume_connection = KafkaConsumeConnection(
+             logger=logger,
+             bootstrap_servers=app_kafka_bootstrap_servers,
+             group_id=app_name
+         )
+         return KafkaConsumer(
+             logger=logger,
+             consume_connection=consume_connection,
+             serializer=serializer
+         )
+     if broker_type == 'mock':
+         return mock_consumer
+     raise Exception(f'Invalid broker type: {broker_type}')
+
+
+ consumer = init_consumer(app_broker_type, message_serializer)
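With the default APP_BROKER_TYPE of 'mock', the exported `consumer` is the mock implementation; all three variants are assumed to share the Consumer interface seen in the Kafka implementation below (a `register(event_name)` decorator plus an async `run()`). A hypothetical call site:

    import asyncio
    from component.message_consumer import consumer

    @consumer.register('order_created')
    async def handle_order_created(message):
        print('received:', message)

    asyncio.run(consumer.run())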
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_mocker.py
@@ -0,0 +1,6 @@
+ from core.messagebus.mock import MockConsumer, MockPublisher
+ from component.message_serializer import message_serializer
+ from component.log import logger
+
+ mock_consumer = MockConsumer(logger, message_serializer)
+ mock_publisher = MockPublisher(logger, mock_consumer, message_serializer)
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_publisher.py
@@ -0,0 +1,42 @@
+ from config import (
+     app_broker_type, app_rmq_connection, app_kafka_bootstrap_servers
+ )
+ from core.messagebus.messagebus import Publisher, MessageSerializer
+ from core.messagebus.kafka.publisher import (
+     KafkaPublishConnection, KafkaPublisher
+ )
+ from core.messagebus.rabbitmq.publisher import (
+     RMQPublishConnection, RMQPublisher
+ )
+ from component.message_mocker import mock_publisher
+ from component.message_serializer import message_serializer
+ from component.log import logger
+
+
+ def init_publisher(
+     broker_type: str, serializer: MessageSerializer
+ ) -> Publisher:
+     if broker_type == 'rabbitmq':
+         publish_connection = RMQPublishConnection(
+             logger=logger, connection_string=app_rmq_connection
+         )
+         return RMQPublisher(
+             logger=logger,
+             publish_connection=publish_connection,
+             serializer=serializer
+         )
+     if broker_type == 'kafka':
+         publish_connection = KafkaPublishConnection(
+             logger=logger, bootstrap_servers=app_kafka_bootstrap_servers
+         )
+         return KafkaPublisher(
+             logger=logger,
+             publish_connection=publish_connection,
+             serializer=serializer
+         )
+     if broker_type == 'mock':
+         return mock_publisher
+     raise Exception(f'Invalid broker type: {broker_type}')
+
+
+ publisher = init_publisher(app_broker_type, message_serializer)
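A hypothetical counterpart for the publishing side; the exact `publish` signature comes from the Publisher interface in core/messagebus/messagebus.py, which is not shown in this diff, so the event-name-plus-payload form below is an assumption:

    import asyncio
    from component.message_publisher import publisher

    async def main():
        # assumed signature: publish(event_name, serializable_payload)
        await publisher.publish('order_created', {'id': 1})

    asyncio.run(main())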
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/component/message_serializer.py
@@ -0,0 +1,9 @@
+ from core.messagebus.messagebus import MessageSerializer
+
+
+ def init_message_serializer() -> MessageSerializer:
+     # Add custom logic if necessary
+     return MessageSerializer()
+
+
+ message_serializer = init_message_serializer()
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/config.py
@@ -0,0 +1,43 @@
+ from typing import List
+ from helper.conversion import str_to_boolean, str_to_logging_level
+ import os
+ import json
+
+ app_name = os.environ.get('APP_NAME', 'app')
+ app_logging_level = str_to_logging_level(
+     os.environ.get('APP_LOGGING_LEVEL', 'INFO')
+ )
+ app_broker_type = os.environ.get('APP_BROKER_TYPE', 'mock')
+ app_host = os.environ.get('APP_HOST', '0.0.0.0')
+ app_port = int(os.environ.get('APP_PORT', '8080'))
+ app_reload = str_to_boolean(os.environ.get('APP_RELOAD', 'true'))
+
+ app_rmq_connection = os.environ.get(
+     'APP_RMQ_CONNECTION', 'amqp://guest:guest@localhost/'
+ )
+
+ app_kafka_bootstrap_servers = os.environ.get(
+     'APP_KAFKA_BOOTSTRAP_SERVERS', 'localhost:9092'
+ )
+
+ cors_allow_origins: List[str] = json.loads(os.getenv(
+     'APP_CORS_ALLOW_ORIGINS', '["*"]'
+ ))
+ cors_allow_origin_regex: str = os.getenv(
+     'APP_CORS_ALLOW_ORIGIN_REGEX', ''
+ )
+ cors_allow_methods: List[str] = json.loads(os.getenv(
+     'APP_CORS_ALLOW_METHODS', '["*"]'
+ ))
+ cors_allow_headers: List[str] = json.loads(os.getenv(
+     'APP_CORS_ALLOW_HEADERS', '["*"]'
+ ))
+ cors_allow_credentials: bool = str_to_boolean(os.getenv(
+     'APP_CORS_ALLOW_CREDENTIALS', 'false'
+ ))
+ cors_expose_headers: bool = str_to_boolean(os.getenv(
+     'APP_CORS_EXPOSE_HEADERS', 'false'
+ ))
+ cors_max_age: int = int(os.getenv(
+     'APP_CORS_MAX_AGE', '600'
+ ))
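The CORS list settings are decoded from JSON strings, so overrides must be valid JSON arrays rather than comma-separated values. A small illustration of the parsing, with a hypothetical origin:

    import json
    import os

    os.environ['APP_CORS_ALLOW_ORIGINS'] = '["https://example.com"]'
    origins = json.loads(os.getenv('APP_CORS_ALLOW_ORIGINS', '["*"]'))
    assert origins == ['https://example.com']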
zrb/builtin/generator/fastapp/template/src/kebab-app-name/src/core/messagebus/kafka/consumer.py
@@ -0,0 +1,198 @@
+ from typing import Any, Callable, Mapping, Optional
+ from core.messagebus.messagebus import (
+     Consumer, THandler, MessageSerializer,
+     get_message_serializer
+ )
+ from aiokafka import AIOKafkaConsumer, __version__
+ from aiokafka.consumer.consumer import RoundRobinPartitionAssignor
+
+ import asyncio
+ import inspect
+ import logging
+
+
+ class KafkaConsumeConnection():
+     def __init__(
+         self,
+         logger: logging.Logger,
+         bootstrap_servers: str,
+         client_id='aiokafka-' + __version__,
+         group_id: Optional[str] = None,
+         key_deserializer=None,
+         value_deserializer=None,
+         fetch_max_wait_ms=500,
+         fetch_max_bytes=52428800,
+         fetch_min_bytes=1,
+         max_partition_fetch_bytes=1 * 1024 * 1024,
+         request_timeout_ms=40 * 1000,
+         retry_backoff_ms=100,
+         auto_offset_reset='latest',
+         enable_auto_commit=True,
+         auto_commit_interval_ms=5000,
+         check_crcs=True,
+         metadata_max_age_ms=5 * 60 * 1000,
+         partition_assignment_strategy=(RoundRobinPartitionAssignor,),
+         max_poll_interval_ms=300000,
+         rebalance_timeout_ms=None,
+         session_timeout_ms=10000,
+         heartbeat_interval_ms=3000,
+         consumer_timeout_ms=200,
+         max_poll_records=None,
+         ssl_context=None,
+         security_protocol='PLAINTEXT',
+         api_version='auto',
+         exclude_internal_topics=True,
+         connections_max_idle_ms=540000,
+         isolation_level="read_uncommitted",
+         sasl_mechanism="PLAIN",
+         sasl_plain_password=None,
+         sasl_plain_username=None,
+         sasl_kerberos_service_name='kafka',
+         sasl_kerberos_domain_name=None,
+         sasl_oauth_token_provider=None
+     ):
+         self.logger = logger
+         self.consumer: Optional[AIOKafkaConsumer] = None
+         self.bootstrap_servers = bootstrap_servers
+         self.client_id = client_id
+         self.group_id = group_id
+         self.key_deserializer = key_deserializer
+         self.value_deserializer = value_deserializer
+         self.fetch_max_wait_ms = fetch_max_wait_ms
+         self.fetch_max_bytes = fetch_max_bytes
+         self.fetch_min_bytes = fetch_min_bytes
+         self.max_partition_fetch_bytes = max_partition_fetch_bytes
+         self.request_timeout_ms = request_timeout_ms
+         self.retry_backoff_ms = retry_backoff_ms
+         self.auto_offset_reset = auto_offset_reset
+         self.enable_auto_commit = enable_auto_commit
+         self.auto_commit_interval_ms = auto_commit_interval_ms
+         self.check_crcs = check_crcs
+         self.metadata_max_age_ms = metadata_max_age_ms
+         self.partition_assignment_strategy = partition_assignment_strategy
+         self.max_poll_interval_ms = max_poll_interval_ms
+         self.rebalance_timeout_ms = rebalance_timeout_ms
+         self.session_timeout_ms = session_timeout_ms
+         self.heartbeat_interval_ms = heartbeat_interval_ms
+         self.consumer_timeout_ms = consumer_timeout_ms
+         self.max_poll_records = max_poll_records
+         self.ssl_context = ssl_context
+         self.security_protocol = security_protocol
+         self.api_version = api_version
+         self.exclude_internal_topics = exclude_internal_topics
+         self.connections_max_idle_ms = connections_max_idle_ms
+         self.isolation_level = isolation_level
+         self.sasl_mechanism = sasl_mechanism
+         self.sasl_plain_password = sasl_plain_password
+         self.sasl_plain_username = sasl_plain_username
+         self.sasl_kerberos_service_name = sasl_kerberos_service_name
+         self.sasl_kerberos_domain_name = sasl_kerberos_domain_name
+         self.sasl_oauth_token_provider = sasl_oauth_token_provider
+
+     async def __aenter__(self):
+         self.logger.info('🐼 Create kafka consumer')
+         self.consumer = AIOKafkaConsumer(
+             bootstrap_servers=self.bootstrap_servers,
+             client_id=self.client_id,
+             group_id=self.group_id,
+             key_deserializer=self.key_deserializer,
+             value_deserializer=self.value_deserializer,
+             fetch_max_wait_ms=self.fetch_max_wait_ms,
+             fetch_max_bytes=self.fetch_max_bytes,
+             fetch_min_bytes=self.fetch_min_bytes,
+             max_partition_fetch_bytes=self.max_partition_fetch_bytes,
+             request_timeout_ms=self.request_timeout_ms,
+             retry_backoff_ms=self.retry_backoff_ms,
+             auto_offset_reset=self.auto_offset_reset,
+             enable_auto_commit=self.enable_auto_commit,
+             auto_commit_interval_ms=self.auto_commit_interval_ms,
+             check_crcs=self.check_crcs,
+             metadata_max_age_ms=self.metadata_max_age_ms,
+             partition_assignment_strategy=self.partition_assignment_strategy,
+             max_poll_interval_ms=self.max_poll_interval_ms,
+             rebalance_timeout_ms=self.rebalance_timeout_ms,
+             session_timeout_ms=self.session_timeout_ms,
+             heartbeat_interval_ms=self.heartbeat_interval_ms,
+             consumer_timeout_ms=self.consumer_timeout_ms,
+             max_poll_records=self.max_poll_records,
+             ssl_context=self.ssl_context,
+             security_protocol=self.security_protocol,
+             api_version=self.api_version,
+             exclude_internal_topics=self.exclude_internal_topics,
+             connections_max_idle_ms=self.connections_max_idle_ms,
+             isolation_level=self.isolation_level,
+             sasl_mechanism=self.sasl_mechanism,
+             sasl_plain_password=self.sasl_plain_password,
+             sasl_plain_username=self.sasl_plain_username,
+             sasl_kerberos_service_name=self.sasl_kerberos_service_name,
+             sasl_kerberos_domain_name=self.sasl_kerberos_domain_name,
+             sasl_oauth_token_provider=self.sasl_oauth_token_provider,
+         )
+         self.logger.info('🐼 Start kafka consumer')
+         await self.consumer.start()
+         self.logger.info('🐼 Kafka consumer started')
+         return self
+
+     async def __aexit__(self, exc_type, exc, tb):
+         self.logger.info('🐼 Unsubscribe kafka consumer from all topics')
+         self.consumer.unsubscribe()
+         self.logger.info('🐼 Stop kafka consumer')
+         await self.consumer.stop()
+         self.logger.info('🐼 Kafka consumer stopped')
+
+
+ class KafkaConsumer(Consumer):
+     def __init__(
+         self,
+         logger: logging.Logger,
+         consume_connection: KafkaConsumeConnection,
+         serializer: Optional[MessageSerializer] = None,
+         retry: int = 5
+     ):
+         self.logger = logger
+         self.serializer = get_message_serializer(serializer)
+         self.conn = consume_connection
+         self._handlers: Mapping[str, THandler] = {}
+         self._retry = retry
+
+     def register(self, event_name: str) -> Callable[[THandler], Any]:
+         def wrapper(handler: THandler):
+             self.logger.warning(f'🐼 Register handler for "{event_name}"')
+             self._handlers[event_name] = handler
+             return handler
+         return wrapper
+
+     async def run(self):
+         return await self._run(self._retry)
+
+     async def _run(self, retry: int):
+         try:
+             async with self.conn as conn:
+                 consumer: AIOKafkaConsumer = conn.consumer
+                 topics = list(self._handlers.keys())
+                 self.logger.warning(f'🐼 Subscribe to topics: {topics}')
+                 consumer.subscribe(topics=topics)
+                 async for message in consumer:
+                     event_name = message.topic
+                     message_handler = self._handlers.get(event_name)
+                     decoded_value = self.serializer.decode(
+                         event_name, message.value
+                     )
+                     self.logger.info(
+                         f'🐼 Consume "{event_name}": {decoded_value}'
+                     )
+                     await self._run_handler(message_handler, decoded_value)
+                     retry = self._retry
+         except Exception:
+             if retry == 0:
+                 self.logger.fatal('🐼 Cannot retry')
+                 raise
+             self.logger.warning('🐼 Retry to consume')
+             await self._run(retry-1)
+
+     async def _run_handler(
+         self, message_handler: THandler, decoded_value: Any
+     ):
+         if inspect.iscoroutinefunction(message_handler):
+             return asyncio.create_task(message_handler(decoded_value))
+         return message_handler(decoded_value)
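For reference, a direct wiring of the two classes above, bypassing the template's component layer; this sketch assumes a reachable broker on localhost:9092 and that `get_message_serializer` falls back to a default serializer when none is passed:

    import asyncio
    import logging

    logger = logging.getLogger('src')
    conn = KafkaConsumeConnection(
        logger=logger,
        bootstrap_servers='localhost:9092',
        group_id='app'
    )
    kafka_consumer = KafkaConsumer(logger=logger, consume_connection=conn)

    @kafka_consumer.register('greeting')
    def on_greeting(value):
        logger.info(value)

    asyncio.run(kafka_consumer.run())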