pulse-broker 1.0.8 (pulse_broker-1.0.8-py3-none-any.whl)

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
pulse/__init__.py ADDED
@@ -0,0 +1,15 @@
+ # Try to update proto first
+ try:
+     from .proto_manager import ensure_proto
+     import os
+     host = os.environ.get("PULSE_HOST", "localhost")
+     port = int(os.environ.get("PULSE_HTTP_PORT", "5555"))
+     ensure_proto(host, port)
+ except Exception:
+     pass
+
+ from .producer import Producer
+ from .consumer import consumer, commit, run
+ from .config import load_config
+
+ __all__ = ["Producer", "consumer", "commit", "run", "load_config"]
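The import-time hook above reads `PULSE_HOST` and `PULSE_HTTP_PORT` from the environment, so the broker can be selected before the package is imported. A minimal sketch (the host name is illustrative):

```python
import os

# Point the SDK at a non-default broker *before* importing pulse; the
# import triggers ensure_proto(host, port) as a best-effort step.
os.environ["PULSE_HOST"] = "broker.internal"   # hypothetical host
os.environ["PULSE_HTTP_PORT"] = "8080"

import pulse  # any failure to refresh the stubs is silently ignored
```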
pulse/config.py ADDED
@@ -0,0 +1,61 @@
+ import os
+ import copy
+ import yaml
+
+ DEFAULT_CONFIG = {
+     "broker": {
+         "host": "localhost",
+         "http_port": 5555,
+         "grpc_port": 5556,
+         "timeout_ms": 5000
+     },
+     "client": {
+         "id": "python-client",
+         "auto_commit": True,
+         "max_retries": 3
+     },
+     "topics": []
+ }
+
+ def load_config(path=None):
+     """
+     Load configuration from a YAML file.
+     If path is not provided, looks for pulse.yaml or pulse.yml in the current directory.
+     """
+     if not path:
+         if os.path.exists("pulse.yaml"):
+             path = "pulse.yaml"
+         elif os.path.exists("pulse.yml"):
+             path = "pulse.yml"
+
+     # Deep copy so merging a file config never mutates DEFAULT_CONFIG's nested dicts
+     config = copy.deepcopy(DEFAULT_CONFIG)
+
+     if path and os.path.exists(path):
+         with open(path, "r") as f:
+             file_config = yaml.safe_load(f)
+         if file_config:
+             _merge_dicts(config, file_config)
+
+     return config
+
+ def _merge_dicts(base, update):
+     """Recursively merge `update` into `base` in place."""
+     for k, v in update.items():
+         if isinstance(v, dict) and k in base and isinstance(base[k], dict):
+             _merge_dicts(base[k], v)
+         else:
+             base[k] = v
+
+ # Global config instance, loaded once at import time
+ _config = load_config()
+
+ def get_config():
+     return _config
+
+ def get_topic_config(topic_name):
+     """Get the configuration for a specific topic, or None if not configured."""
+     for topic in _config.get("topics", []):
+         if topic.get("name") == topic_name:
+             return topic
+     return None
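To illustrate the merge semantics of `load_config`: values from the YAML file override defaults key by key, while untouched defaults survive. A small sketch, assuming a `pulse.yaml` that only overrides the HTTP port:

```python
from pulse.config import load_config

# pulse.yaml contains just:
#   broker:
#     http_port: 8080
cfg = load_config("pulse.yaml")
assert cfg["broker"]["http_port"] == 8080  # overridden by the file
assert cfg["broker"]["grpc_port"] == 5556  # default preserved by the deep merge
```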
pulse/consumer.py ADDED
@@ -0,0 +1,256 @@
+ import grpc
+ import threading
+ import time
+ import contextvars
+ import json
+ from .config import get_config, get_topic_config
+ from .proto import pulse_pb2, pulse_pb2_grpc
+
+ # Registry of registered consumers
+ _consumers = []
+
+ # Context variable to hold the current message context for manual commit
+ _current_context = contextvars.ContextVar("current_context", default=None)
+
+ class MessageContext:
+     def __init__(self, stub, topic, consumer_group, offset):
+         self.stub = stub
+         self.topic = topic
+         self.consumer_group = consumer_group
+         self.offset = offset
+         self.committed = False
+
+ def commit():
+     """
+     Manually commit the current message offset.
+     Must be called within a consumer handler.
+     """
+     ctx = _current_context.get()
+     if not ctx:
+         raise RuntimeError("commit() called outside of a consumer handler")
+
+     if ctx.committed:
+         return
+
+     try:
+         ctx.stub.CommitOffset(pulse_pb2.CommitOffsetRequest(
+             topic=ctx.topic,
+             consumer_name=ctx.consumer_group,
+             offset=ctx.offset + 1
+         ))
+         ctx.committed = True
+     except grpc.RpcError as e:
+         print(f"Error committing offset: {e}")
+         raise
+
+ class Message:
+     def __init__(self, proto_msg):
+         self.offset = proto_msg.offset
+         self.timestamp = proto_msg.timestamp
+         self._raw_payload = proto_msg.payload
+         # Headers from the broker (map<string, string>).
+         # Some older messages may not have headers.
+         try:
+             self._headers = dict(proto_msg.headers)
+         except Exception:
+             self._headers = {}
+
+     @property
+     def payload(self):
+         """Return the payload converted to its original type using headers.
+
+         The `payload-type` header may be 'json', 'string', or 'bytes'.
+         If it is missing, attempt to parse JSON and fall back to raw bytes.
+         """
+         ptype = self._headers.get("payload-type")
+         if ptype == "json":
+             try:
+                 return json.loads(self._raw_payload)
+             except Exception:
+                 return self._raw_payload
+         if ptype == "string":
+             try:
+                 return self._raw_payload.decode("utf-8")
+             except Exception:
+                 return self._raw_payload
+         if ptype == "bytes":
+             return self._raw_payload
+
+         # Fallback for older messages: try JSON, then raw bytes
+         try:
+             return json.loads(self._raw_payload)
+         except Exception:
+             return self._raw_payload
+
+     @property
+     def raw_payload(self):
+         return self._raw_payload
+
+     @property
+     def headers(self):
+         return self._headers
+
+     def __str__(self):
+         return f"Message(offset={self.offset}, payload={self.payload})"
+
+ def consumer(topic, host=None, port=None, consumer_group=None, auto_commit=None, grouped=True):
+     """
+     Decorator that registers a function as a consumer for a topic.
+
+     Parameters:
+     - topic: topic name
+     - consumer_group: explicit group id (overrides config client.id)
+     - grouped (bool): when True (the default), handlers with the same
+       topic+group share a single stream/consumer and messages are distributed
+       among them. When False, the decorator creates a unique consumer group
+       id so the handler receives every message independently.
+     """
+     def decorator(func):
+         config = get_config()
+         topic_config = get_topic_config(topic)
+
+         # Determine connection settings
+         c_host = host or config["broker"]["host"]
+         c_port = port or config["broker"]["grpc_port"]
+
+         # Determine the base consumer group
+         base_group = consumer_group or config["client"]["id"]
+
+         # If grouped is False, create a unique consumer id so this handler
+         # consumes independently of other consumers using the same app id.
+         if not grouped:
+             import uuid
+             c_group = f"{base_group}-{uuid.uuid4().hex}"
+         else:
+             c_group = base_group
+
+         # Determine auto_commit: decorator arg, then topic config, then client default
+         c_auto_commit = auto_commit
+         if c_auto_commit is None:
+             c_auto_commit = config["client"]["auto_commit"]
+             if topic_config and "consume" in topic_config:
+                 if "auto_commit" in topic_config["consume"]:
+                     c_auto_commit = topic_config["consume"]["auto_commit"]
+
+         # Register the handler. run() coalesces entries by (topic, group) so a
+         # single stream can dispatch to all handlers in a group, avoiding
+         # duplicate delivery within the same process when grouped=True.
+         _consumers.append({
+             "topic": topic,
+             "host": c_host,
+             "port": c_port,
+             "group": c_group,
+             "auto_commit": c_auto_commit,
+             "handler": func,
+             "grouped": grouped
+         })
+         return func
+     return decorator
+
+ def run():
+     """
+     Start all registered consumers.
+     This function blocks.
+     """
+     # Coalesce consumers by (topic, host, port, group) so we open a single
+     # streaming connection per group and dispatch messages to the registered
+     # handlers in this process. Entries registered with grouped=False already
+     # carry a unique group id and are treated separately.
+     grouped_map = {}
+     for c in _consumers:
+         key = (c["topic"], c["host"], c["port"], c["group"])
+         if key not in grouped_map:
+             grouped_map[key] = {
+                 "topic": c["topic"],
+                 "host": c["host"],
+                 "port": c["port"],
+                 "group": c["group"],
+                 "auto_commit": c["auto_commit"],
+                 "handlers": []
+             }
+         grouped_map[key]["handlers"].append(c["handler"])
+
+     threads = []
+     for key, gc in grouped_map.items():
+         t = threading.Thread(target=_consume_loop_group, args=(gc,), daemon=True)
+         t.start()
+         threads.append(t)
+
+     # Keep the main thread alive
+     try:
+         while True:
+             time.sleep(1)
+     except KeyboardInterrupt:
+         print("Stopping consumers...")
+
+ def _consume_loop(c_config):
+     # Backwards-compatible single-consumer entry point (kept for any direct
+     # callers). Wrap the config into group form and delegate to the group
+     # loop, which owns the channel and stub.
+     gc = {
+         "topic": c_config["topic"],
+         "host": c_config["host"],
+         "port": c_config["port"],
+         "group": c_config["group"],
+         "auto_commit": c_config["auto_commit"],
+         "handlers": [c_config.get("handler")]
+     }
+     _consume_loop_group(gc)
+
+
+ def _consume_loop_group(group_config):
+     topic = group_config["topic"]
+     group = group_config["group"]
+     auto_commit = group_config["auto_commit"]
+     handlers = group_config.get("handlers", [])
+
+     address = f"{group_config['host']}:{group_config['port']}"
+     channel = grpc.insecure_channel(address)
+     stub = pulse_pb2_grpc.PulseServiceStub(channel)
+
+     print(f"Starting consumer for topic '{topic}' (group: {group}) on {address}")
+
+     # Simple round-robin index for dispatching to handlers
+     handler_idx = 0
+
+     while True:
+         try:
+             request = pulse_pb2.ConsumeRequest(
+                 topic=topic,
+                 consumer_name=group,
+                 offset=0
+             )
+             stream = stub.Consume(request)
+
+             for proto_msg in stream:
+                 msg = Message(proto_msg)
+
+                 # Round-robin handler selection
+                 if not handlers:
+                     # No handlers registered; skip
+                     continue
+                 handler = handlers[handler_idx % len(handlers)]
+                 handler_idx += 1
+
+                 # Set the context for manual commit
+                 ctx = MessageContext(stub, topic, group, msg.offset)
+                 token = _current_context.set(ctx)
+
+                 try:
+                     handler(msg)
+
+                     # Auto-commit if enabled and not manually committed
+                     if auto_commit and not ctx.committed:
+                         commit()
+
+                 except Exception as e:
+                     print(f"Error processing message: {e}")
+                 finally:
+                     _current_context.reset(token)
+
+         except grpc.RpcError as e:
+             print(f"Connection lost for {topic}: {e}. Retrying in 5s...")
+             time.sleep(5)
+         except Exception as e:
+             print(f"Unexpected error in consumer {topic}: {e}")
+             time.sleep(5)
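The `grouped` flag controls fan-out between handlers in one process. A sketch of both modes, assuming an `events` topic exists:

```python
from pulse import consumer, run

# Same topic and group: these two handlers share one stream, and
# messages are distributed between them round-robin.
@consumer("events")
def handler_a(msg):
    print("a:", msg.payload)

@consumer("events")
def handler_b(msg):
    print("b:", msg.payload)

# grouped=False appends a unique suffix to the group id, so this
# handler independently receives every message on the topic.
@consumer("events", grouped=False)
def audit(msg):
    print("audit:", msg.offset)

run()  # blocks; one thread per (topic, host, port, group)
```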
pulse/producer.py ADDED
@@ -0,0 +1,88 @@
+ import json
+ import grpc
+ from .config import get_config
+ from .proto import pulse_pb2, pulse_pb2_grpc
+
+ def _encode_payload(payload):
+     """Encode a payload to bytes and return (data, headers).
+
+     Sets the `payload-type` header so consumers can restore the original type.
+     """
+     if isinstance(payload, dict):
+         return json.dumps(payload).encode("utf-8"), {"payload-type": "json"}
+     if isinstance(payload, str):
+         return payload.encode("utf-8"), {"payload-type": "string"}
+     if isinstance(payload, bytes):
+         return payload, {"payload-type": "bytes"}
+     raise ValueError("Payload must be bytes, str, or dict")
+
+ class Producer:
+     def __init__(self, host=None, port=None):
+         config = get_config()
+         self.host = host or config["broker"]["host"]
+         self.port = port or config["broker"]["grpc_port"]
+         self.address = f"{self.host}:{self.port}"
+
+         self.channel = grpc.insecure_channel(self.address)
+         self.stub = pulse_pb2_grpc.PulseServiceStub(self.channel)
+
+         self._setup_topics(config)
+
+     def _setup_topics(self, config):
+         for topic_cfg in config.get("topics", []):
+             if topic_cfg.get("create_if_missing", False):
+                 name = topic_cfg["name"]
+                 t_config = topic_cfg.get("config", {})
+                 fifo = t_config.get("fifo", False)
+                 retention_bytes = t_config.get("retention_bytes", 0)
+
+                 req = pulse_pb2.CreateTopicRequest(
+                     topic=name,
+                     fifo=fifo,
+                     retention_bytes=retention_bytes
+                 )
+                 try:
+                     self.stub.CreateTopic(req)
+                 except grpc.RpcError:
+                     # Ignore errors (e.g. the topic already exists)
+                     pass
+
+     def send(self, topic, payload):
+         """
+         Send a message to a topic.
+         payload can be bytes, str, or a dict (which will be JSON serialized).
+         """
+         data, headers = _encode_payload(payload)
+
+         request = pulse_pb2.PublishRequest(
+             topic=topic,
+             payload=data,
+             headers=headers
+         )
+
+         try:
+             self.stub.Publish(request)
+         except grpc.RpcError:
+             # TODO: Handle retries based on config
+             raise
+
+     def stream_send(self, message_iterator):
+         """
+         Send a stream of messages to the broker.
+         message_iterator should yield (topic, payload) tuples.
+         """
+         def request_generator():
+             for topic, payload in message_iterator:
+                 data, headers = _encode_payload(payload)
+                 yield pulse_pb2.PublishRequest(
+                     topic=topic,
+                     payload=data,
+                     headers=headers
+                 )
+
+         # Consume the response stream (acks)
+         for _ in self.stub.StreamPublish(request_generator()):
+             pass
+
+     def close(self):
+         self.channel.close()
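`stream_send` drives the bidirectional `StreamPublish` RPC from a plain iterator of `(topic, payload)` tuples; payloads follow the same bytes/str/dict rules as `send`. A usage sketch:

```python
from pulse import Producer

producer = Producer()

def batch():
    # Each payload is encoded and tagged with a payload-type header,
    # exactly as in Producer.send().
    for i in range(100):
        yield ("events", {"seq": i})

producer.stream_send(batch())  # blocks until all acks are drained
producer.close()
```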
pulse/proto/__init__.py ADDED
File without changes
pulse/proto/pulse_pb2.py ADDED
@@ -0,0 +1,65 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # NO CHECKED-IN PROTOBUF GENCODE
+ # source: pulse.proto
+ # Protobuf Python Version: 6.31.1
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import runtime_version as _runtime_version
+ from google.protobuf import symbol_database as _symbol_database
+ from google.protobuf.internal import builder as _builder
+ _runtime_version.ValidateProtobufRuntimeVersion(
+     _runtime_version.Domain.PUBLIC,
+     6,
+     31,
+     1,
+     '',
+     'pulse.proto'
+ )
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0bpulse.proto\x12\x08pulse.v1\"\x98\x01\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\x0c\x12\x36\n\x07headers\x18\x03 \x03(\x0b\x32%.pulse.v1.PublishRequest.HeadersEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"-\n\x0fPublishResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06offset\x18\x02 \x01(\x04\"F\n\x0e\x43onsumeRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x15\n\rconsumer_name\x18\x02 \x01(\t\x12\x0e\n\x06offset\x18\x03 \x01(\x04\"\xae\x01\n\x0f\x43onsumeResponse\x12\x0e\n\x06offset\x18\x01 \x01(\x04\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\x12\x0f\n\x07payload\x18\x03 \x01(\x0c\x12\x37\n\x07headers\x18\x04 \x03(\x0b\x32&.pulse.v1.ConsumeResponse.HeadersEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"K\n\x13\x43ommitOffsetRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x15\n\rconsumer_name\x18\x02 \x01(\t\x12\x0e\n\x06offset\x18\x03 \x01(\x04\"\'\n\x14\x43ommitOffsetResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\xa9\x01\n\x12\x43reateTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x0c\n\x04\x66ifo\x18\x02 \x01(\x08\x12\x17\n\x0fretention_bytes\x18\x03 \x01(\x03\x12\x16\n\x0eretention_time\x18\x04 \x01(\x03\x12\x17\n\x0f\x66lush_threshold\x18\x05 \x01(\x05\x12\x16\n\x0e\x66lush_interval\x18\x06 \x01(\x03\x12\x14\n\x0csegment_size\x18\x07 \x01(\x03\"&\n\x13\x43reateTopicResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\x13\n\x11ListTopicsRequest\"$\n\x12ListTopicsResponse\x12\x0e\n\x06topics\x18\x01 \x03(\t2\xbe\x03\n\x0cPulseService\x12>\n\x07Publish\x12\x18.pulse.v1.PublishRequest\x1a\x19.pulse.v1.PublishResponse\x12H\n\rStreamPublish\x12\x18.pulse.v1.PublishRequest\x1a\x19.pulse.v1.PublishResponse(\x01\x30\x01\x12@\n\x07\x43onsume\x12\x18.pulse.v1.ConsumeRequest\x1a\x19.pulse.v1.ConsumeResponse0\x01\x12M\n\x0c\x43ommitOffset\x12\x1d.pulse.v1.CommitOffsetRequest\x1a\x1e.pulse.v1.CommitOffsetResponse\x12J\n\x0b\x43reateTopic\x12\x1c.pulse.v1.CreateTopicRequest\x1a\x1d.pulse.v1.CreateTopicResponse\x12G\n\nListTopics\x12\x1b.pulse.v1.ListTopicsRequest\x1a\x1c.pulse.v1.ListTopicsResponseB\x11Z\x0fpulse/pkg/protob\x06proto3')
+
+ _globals = globals()
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'pulse_pb2', _globals)
+ if not _descriptor._USE_C_DESCRIPTORS:
+   _globals['DESCRIPTOR']._loaded_options = None
+   _globals['DESCRIPTOR']._serialized_options = b'Z\017pulse/pkg/proto'
+   _globals['_PUBLISHREQUEST_HEADERSENTRY']._loaded_options = None
+   _globals['_PUBLISHREQUEST_HEADERSENTRY']._serialized_options = b'8\001'
+   _globals['_CONSUMERESPONSE_HEADERSENTRY']._loaded_options = None
+   _globals['_CONSUMERESPONSE_HEADERSENTRY']._serialized_options = b'8\001'
+   _globals['_PUBLISHREQUEST']._serialized_start=26
+   _globals['_PUBLISHREQUEST']._serialized_end=178
+   _globals['_PUBLISHREQUEST_HEADERSENTRY']._serialized_start=132
+   _globals['_PUBLISHREQUEST_HEADERSENTRY']._serialized_end=178
+   _globals['_PUBLISHRESPONSE']._serialized_start=180
+   _globals['_PUBLISHRESPONSE']._serialized_end=225
+   _globals['_CONSUMEREQUEST']._serialized_start=227
+   _globals['_CONSUMEREQUEST']._serialized_end=297
+   _globals['_CONSUMERESPONSE']._serialized_start=300
+   _globals['_CONSUMERESPONSE']._serialized_end=474
+   _globals['_CONSUMERESPONSE_HEADERSENTRY']._serialized_start=132
+   _globals['_CONSUMERESPONSE_HEADERSENTRY']._serialized_end=178
+   _globals['_COMMITOFFSETREQUEST']._serialized_start=476
+   _globals['_COMMITOFFSETREQUEST']._serialized_end=551
+   _globals['_COMMITOFFSETRESPONSE']._serialized_start=553
+   _globals['_COMMITOFFSETRESPONSE']._serialized_end=592
+   _globals['_CREATETOPICREQUEST']._serialized_start=595
+   _globals['_CREATETOPICREQUEST']._serialized_end=764
+   _globals['_CREATETOPICRESPONSE']._serialized_start=766
+   _globals['_CREATETOPICRESPONSE']._serialized_end=804
+   _globals['_LISTTOPICSREQUEST']._serialized_start=806
+   _globals['_LISTTOPICSREQUEST']._serialized_end=825
+   _globals['_LISTTOPICSRESPONSE']._serialized_start=827
+   _globals['_LISTTOPICSRESPONSE']._serialized_end=863
+   _globals['_PULSESERVICE']._serialized_start=866
+   _globals['_PULSESERVICE']._serialized_end=1312
+ # @@protoc_insertion_point(module_scope)
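The serialized descriptor above defines the request/response messages used throughout the SDK; for instance, `PublishRequest` carries a topic, a bytes payload, and a `map<string, string>` headers field. A quick sketch of constructing one by hand:

```python
from pulse.proto import pulse_pb2

req = pulse_pb2.PublishRequest(
    topic="events",
    payload=b'{"hello": "world"}',
    headers={"payload-type": "json"},  # map<string, string> field
)
print(req.topic, dict(req.headers))
```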
pulse/proto/pulse_pb2_grpc.py ADDED
@@ -0,0 +1,318 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+ import warnings
+
+ from . import pulse_pb2 as pulse__pb2
+
+ GRPC_GENERATED_VERSION = '1.76.0'
+ GRPC_VERSION = grpc.__version__
+ _version_not_supported = False
+
+ try:
+     from grpc._utilities import first_version_is_lower
+     _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+ except ImportError:
+     _version_not_supported = True
+
+ if _version_not_supported:
+     raise RuntimeError(
+         f'The grpc package installed is at version {GRPC_VERSION},'
+         + ' but the generated code in pulse_pb2_grpc.py depends on'
+         + f' grpcio>={GRPC_GENERATED_VERSION}.'
+         + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+         + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+     )
+
+
+ class PulseServiceStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.Publish = channel.unary_unary(
+                 '/pulse.v1.PulseService/Publish',
+                 request_serializer=pulse__pb2.PublishRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.PublishResponse.FromString,
+                 _registered_method=True)
+         self.StreamPublish = channel.stream_stream(
+                 '/pulse.v1.PulseService/StreamPublish',
+                 request_serializer=pulse__pb2.PublishRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.PublishResponse.FromString,
+                 _registered_method=True)
+         self.Consume = channel.unary_stream(
+                 '/pulse.v1.PulseService/Consume',
+                 request_serializer=pulse__pb2.ConsumeRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.ConsumeResponse.FromString,
+                 _registered_method=True)
+         self.CommitOffset = channel.unary_unary(
+                 '/pulse.v1.PulseService/CommitOffset',
+                 request_serializer=pulse__pb2.CommitOffsetRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.CommitOffsetResponse.FromString,
+                 _registered_method=True)
+         self.CreateTopic = channel.unary_unary(
+                 '/pulse.v1.PulseService/CreateTopic',
+                 request_serializer=pulse__pb2.CreateTopicRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.CreateTopicResponse.FromString,
+                 _registered_method=True)
+         self.ListTopics = channel.unary_unary(
+                 '/pulse.v1.PulseService/ListTopics',
+                 request_serializer=pulse__pb2.ListTopicsRequest.SerializeToString,
+                 response_deserializer=pulse__pb2.ListTopicsResponse.FromString,
+                 _registered_method=True)
+
+
+ class PulseServiceServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def Publish(self, request, context):
+         """Publish sends a message to a topic.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def StreamPublish(self, request_iterator, context):
+         """StreamPublish sends a stream of messages to a topic.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def Consume(self, request, context):
+         """Consume reads messages from a topic as a stream.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def CommitOffset(self, request, context):
+         """CommitOffset commits the offset for a consumer group.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def CreateTopic(self, request, context):
+         """CreateTopic creates a new topic.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+     def ListTopics(self, request, context):
+         """ListTopics returns a list of all topics.
+         """
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_PulseServiceServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'Publish': grpc.unary_unary_rpc_method_handler(
+                     servicer.Publish,
+                     request_deserializer=pulse__pb2.PublishRequest.FromString,
+                     response_serializer=pulse__pb2.PublishResponse.SerializeToString,
+             ),
+             'StreamPublish': grpc.stream_stream_rpc_method_handler(
+                     servicer.StreamPublish,
+                     request_deserializer=pulse__pb2.PublishRequest.FromString,
+                     response_serializer=pulse__pb2.PublishResponse.SerializeToString,
+             ),
+             'Consume': grpc.unary_stream_rpc_method_handler(
+                     servicer.Consume,
+                     request_deserializer=pulse__pb2.ConsumeRequest.FromString,
+                     response_serializer=pulse__pb2.ConsumeResponse.SerializeToString,
+             ),
+             'CommitOffset': grpc.unary_unary_rpc_method_handler(
+                     servicer.CommitOffset,
+                     request_deserializer=pulse__pb2.CommitOffsetRequest.FromString,
+                     response_serializer=pulse__pb2.CommitOffsetResponse.SerializeToString,
+             ),
+             'CreateTopic': grpc.unary_unary_rpc_method_handler(
+                     servicer.CreateTopic,
+                     request_deserializer=pulse__pb2.CreateTopicRequest.FromString,
+                     response_serializer=pulse__pb2.CreateTopicResponse.SerializeToString,
+             ),
+             'ListTopics': grpc.unary_unary_rpc_method_handler(
+                     servicer.ListTopics,
+                     request_deserializer=pulse__pb2.ListTopicsRequest.FromString,
+                     response_serializer=pulse__pb2.ListTopicsResponse.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'pulse.v1.PulseService', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+     server.add_registered_method_handlers('pulse.v1.PulseService', rpc_method_handlers)
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class PulseService(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def Publish(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             '/pulse.v1.PulseService/Publish',
+             pulse__pb2.PublishRequest.SerializeToString,
+             pulse__pb2.PublishResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
+
+     @staticmethod
+     def StreamPublish(request_iterator,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.stream_stream(
+             request_iterator,
+             target,
+             '/pulse.v1.PulseService/StreamPublish',
+             pulse__pb2.PublishRequest.SerializeToString,
+             pulse__pb2.PublishResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
+
+     @staticmethod
+     def Consume(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_stream(
+             request,
+             target,
+             '/pulse.v1.PulseService/Consume',
+             pulse__pb2.ConsumeRequest.SerializeToString,
+             pulse__pb2.ConsumeResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
+
+     @staticmethod
+     def CommitOffset(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             '/pulse.v1.PulseService/CommitOffset',
+             pulse__pb2.CommitOffsetRequest.SerializeToString,
+             pulse__pb2.CommitOffsetResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
+
+     @staticmethod
+     def CreateTopic(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             '/pulse.v1.PulseService/CreateTopic',
+             pulse__pb2.CreateTopicRequest.SerializeToString,
+             pulse__pb2.CreateTopicResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
+
+     @staticmethod
+     def ListTopics(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_unary(
+             request,
+             target,
+             '/pulse.v1.PulseService/ListTopics',
+             pulse__pb2.ListTopicsRequest.SerializeToString,
+             pulse__pb2.ListTopicsResponse.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
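The generated stub can also be used directly, bypassing the `Producer`/`consumer` helpers. A sketch assuming a broker listening on the default gRPC port:

```python
import grpc
from pulse.proto import pulse_pb2, pulse_pb2_grpc

channel = grpc.insecure_channel("localhost:5556")
stub = pulse_pb2_grpc.PulseServiceStub(channel)

# ListTopics is a plain unary-unary RPC returning repeated topic names.
resp = stub.ListTopics(pulse_pb2.ListTopicsRequest())
print(list(resp.topics))
channel.close()
```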
pulse/proto_manager.py ADDED
@@ -0,0 +1,56 @@
+ import os
+ import urllib.request
+ import re
+
+ def ensure_proto(host="localhost", port=5555):
+     try:
+         from grpc_tools import protoc
+     except ImportError:
+         return
+
+     current_dir = os.path.dirname(os.path.abspath(__file__))
+     proto_dir = os.path.join(current_dir, "proto")
+     proto_file = os.path.join(proto_dir, "pulse.proto")
+
+     # Try to download the schema from the broker
+     try:
+         url = f"http://{host}:{port}/proto"
+         # Short timeout to avoid blocking startup
+         with urllib.request.urlopen(url, timeout=0.5) as response:
+             content = response.read()
+
+         if not os.path.exists(proto_dir):
+             os.makedirs(proto_dir)
+
+         # For now, always overwrite; recompiling is cheap and idempotent.
+         with open(proto_file, "wb") as f:
+             f.write(content)
+
+         # Compile. protoc.main expects argv-style arguments, so the first
+         # element is the program name.
+         cmd = [
+             "grpc_tools.protoc",
+             f"-I{proto_dir}",
+             f"--python_out={proto_dir}",
+             f"--grpc_python_out={proto_dir}",
+             "pulse.proto"
+         ]
+         exit_code = protoc.main(cmd)
+
+         if exit_code == 0:
+             # Fix imports in pulse_pb2_grpc.py to use a relative import
+             grpc_file = os.path.join(proto_dir, "pulse_pb2_grpc.py")
+             if os.path.exists(grpc_file):
+                 with open(grpc_file, "r") as f:
+                     content = f.read()
+
+                 # Replace 'import pulse_pb2' with 'from . import pulse_pb2'
+                 new_content = re.sub(r'^import pulse_pb2', 'from . import pulse_pb2', content, flags=re.MULTILINE)
+
+                 if new_content != content:
+                     with open(grpc_file, "w") as f:
+                         f.write(new_content)
+
+     except Exception:
+         # Ignore errors (server down, etc.)
+         pass
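`ensure_proto` can also be invoked explicitly, e.g. after upgrading the broker, instead of relying on the import-time hook. Since every failure is swallowed, it is safe to call unconditionally:

```python
from pulse.proto_manager import ensure_proto

# Fetches http://localhost:5555/proto, rewrites pulse.proto, recompiles
# the stubs with grpcio-tools, and patches the generated import.
ensure_proto(host="localhost", port=5555)
```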
pulse_broker-1.0.8.dist-info/METADATA ADDED
@@ -0,0 +1,133 @@
+ Metadata-Version: 2.4
+ Name: pulse-broker
+ Version: 1.0.8
+ Summary: Python SDK for Pulse Broker
+ Home-page: https://github.com/marcosrosa/pulse
+ Author: Marcos Rosa
+ Author-email: marcos@example.com
+ Project-URL: Bug Tracker, https://github.com/marcosrosa/pulse/issues
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ Requires-Dist: grpcio>=1.50.0
+ Requires-Dist: protobuf>=4.21.0
+ Requires-Dist: pyyaml>=6.0
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: project-url
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ # Pulse Python SDK
+
+ Official Python client for [Pulse Broker](https://github.com/marcosrosa/pulse).
+
+ ## Installation
+
+ ```bash
+ pip install pulse-broker
+ ```
+
+ ## Configuration
+
+ The SDK looks for a `pulse.yaml` (or `pulse.yml`) file in the current working directory. If none is found, it defaults to `localhost:5555` (HTTP) and `localhost:5556` (gRPC).
+
+ ### Example `pulse.yaml`
+
+ ```yaml
+ # Connection Settings
+ broker:
+   host: "localhost"
+   http_port: 5555
+   grpc_port: 5556
+   timeout_ms: 5000
+
+ # Client Defaults
+ client:
+   id: "my-python-app"
+   auto_commit: true  # Automatically commit offsets after successful processing
+   max_retries: 3
+
+ # Topic Configuration
+ topics:
+   - name: "events"
+     create_if_missing: true
+     config:
+       fifo: false
+       retention_bytes: 1073741824  # 1GB
+     consume:
+       auto_commit: true
+
+   - name: "transactions"
+     create_if_missing: true
+     config:
+       fifo: true
+     consume:
+       auto_commit: false  # Manual commit required
+
+   - name: "logs"
+     create_if_missing: true
+     config:
+       fifo: false
+     consume:
+       auto_commit: true
+ ```
+
+ ## Usage
+
+ ### Producer
+
+ You can send dictionaries (automatically serialized to JSON) or raw bytes.
+
+ ```python
+ from pulse import Producer
+
+ # Initialize (uses pulse.yaml or defaults)
+ # You can override settings: Producer(host="10.0.0.1", port=9090)
+ producer = Producer()
+
+ # Send JSON
+ producer.send("events", {"type": "user_created", "id": 123})
+
+ # Send Bytes
+ producer.send("logs", b"raw log line")
+
+ producer.close()
+ ```
+
+ ### Consumer
+
+ Use the `@consumer` decorator to register message handlers.
+
+ ```python
+ from pulse import consumer, commit, run
+
+ # Simple Consumer (uses auto_commit from config)
+ @consumer("events")
+ def handle_event(msg):
+     print(f"Received event: {msg.payload}")
+     # msg.payload is a dict if JSON, else bytes
+
+ # Manual Commit Consumer
+ # Override config params directly in the decorator if needed
+ @consumer("transactions", auto_commit=False)
+ def handle_transaction(msg):
+     try:
+         process_payment(msg.payload)
+         commit()  # Manually commit offset
+         print(f"Processed transaction {msg.offset}")
+     except Exception as e:
+         print(f"Failed to process: {e}")
+         # Do not commit; the message will be redelivered on restart/rebalance
+
+ if __name__ == "__main__":
+     print("Starting consumers...")
+     run()  # Blocks and runs all registered consumers
+ ```
pulse_broker-1.0.8.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ pulse/__init__.py,sha256=4_52hPziGhNe8OmYHpsC8Ds600AB2BqHNu33B2HxxxE,434
+ pulse/config.py,sha256=Tj33DqGHBWGtwD-i1OY6g2zg7EbcpN7ZYzxxHHu7mGY,1463
+ pulse/consumer.py,sha256=ptI_Ct9wlQSD5TR69rGM_EyMDkURNpROuQd_StUiJMI,8659
+ pulse/producer.py,sha256=puFFS5a-_tlJKEsoMjnaPApE6-3AW_CrC41uSaA6CXw,3519
+ pulse/proto_manager.py,sha256=EWCHAP9SnrCsqIh3z7vlMtCgmGabGvECd9cp-DJWUbk,1978
+ pulse/proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ pulse/proto/pulse_pb2.py,sha256=fozRHf0Nc0ow0m6Js9x_UwpWUSB31P40rcnl1W1AjUw,5093
+ pulse/proto/pulse_pb2_grpc.py,sha256=BpsOTT2QLMBk50bzoJuW9AIOuLzSteQhpTRzPu8XSfc,11808
+ tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tests/test_pulse.py,sha256=kfOc45I3_8u4ahcDV662VFm2ptMEGl4HrBjv8cqQoHU,2339
+ pulse_broker-1.0.8.dist-info/METADATA,sha256=XvyoxEft2WJtHavaSkA8nlyWDh1VPC0FOL7CgHSk67s,3149
+ pulse_broker-1.0.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ pulse_broker-1.0.8.dist-info/top_level.txt,sha256=zkkZiZZQSAp3NX32LmPIvlprjgl4x0qokPGr3fQSmno,12
+ pulse_broker-1.0.8.dist-info/RECORD,,
pulse_broker-1.0.8.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
pulse_broker-1.0.8.dist-info/top_level.txt ADDED
@@ -0,0 +1,2 @@
+ pulse
+ tests
tests/__init__.py ADDED
File without changes
tests/test_pulse.py ADDED
@@ -0,0 +1,76 @@
+ import pytest
+ from unittest.mock import MagicMock, patch, ANY
+ import pulse
+ from pulse.proto import pulse_pb2
+
+ # --- Producer Tests ---
+
+ def test_producer_init():
+     with patch('pulse.producer.grpc.insecure_channel') as mock_channel:
+         p = pulse.Producer(host="myhost", port=9999)
+         mock_channel.assert_called_with("myhost:9999")
+
+ def test_producer_send_dict():
+     with patch('pulse.producer.grpc.insecure_channel'):
+         mock_stub = MagicMock()
+         with patch('pulse.producer.pulse_pb2_grpc.PulseServiceStub', return_value=mock_stub):
+             p = pulse.Producer()
+             p.send("topic1", {"key": "value"})
+
+             mock_stub.Publish.assert_called_once()
+             args, _ = mock_stub.Publish.call_args
+             req = args[0]
+             assert req.topic == "topic1"
+             assert req.payload == b'{"key": "value"}'
+
+ def test_producer_send_bytes():
+     with patch('pulse.producer.grpc.insecure_channel'):
+         mock_stub = MagicMock()
+         with patch('pulse.producer.pulse_pb2_grpc.PulseServiceStub', return_value=mock_stub):
+             p = pulse.Producer()
+             p.send("topic1", b"raw-data")
+
+             args, _ = mock_stub.Publish.call_args
+             req = args[0]
+             assert req.payload == b"raw-data"
+
+ # --- Consumer Tests ---
+
+ def test_consumer_decorator_registration():
+     # Clear consumers
+     from pulse.consumer import _consumers
+     _consumers.clear()
+
+     @pulse.consumer("test-topic", consumer_group="my-group")
+     def my_handler(msg):
+         pass
+
+     assert len(_consumers) == 1
+     c = _consumers[0]
+     assert c["topic"] == "test-topic"
+     assert c["group"] == "my-group"
+     assert c["handler"] == my_handler
+
+ def test_commit_outside_context():
+     with pytest.raises(RuntimeError):
+         pulse.commit()
+
+ # --- Config Tests ---
+
+ def test_load_config_defaults():
+     cfg = pulse.load_config(path="nonexistent")
+     assert cfg["broker"]["http_port"] == 5555
+
+ def test_load_config_file(tmp_path):
+     f = tmp_path / "pulse.yaml"
+     f.write_text("""
+ broker:
+   http_port: 8080
+ client:
+   id: "test-client"
+ """)
+     cfg = pulse.load_config(path=str(f))
+     assert cfg["broker"]["http_port"] == 8080
+     assert cfg["client"]["id"] == "test-client"
+     # Check default preserved
+     assert cfg["broker"]["grpc_port"] == 5556