sentry-arroyo 2.28.4__tar.gz → 2.29.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sentry_arroyo-2.28.4/sentry_arroyo.egg-info → sentry_arroyo-2.29.0}/PKG-INFO +1 -1
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/configuration.py +101 -8
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/consumer.py +0 -1
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/metricDefs.json +1 -1
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/metric_defs.py +33 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0/sentry_arroyo.egg-info}/PKG-INFO +1 -1
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/setup.py +1 -1
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/test_kafka_producer.py +14 -14
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/LICENSE +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/MANIFEST.in +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/README.md +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/abstract.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/local/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/local/backend.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/local/storages/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/local/storages/abstract.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/local/storages/memory.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/dlq.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/errors.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/processor.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/abstract.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/batching.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/buffer.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/filter.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/guard.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/healthcheck.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/noop.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/produce.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/reduce.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/run_task.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/run_task_in_threads.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/run_task_with_multiprocessing.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/processing/strategies/unfold.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/py.typed +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/types.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/clock.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/codecs.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/concurrent.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/logging.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/metrics.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/profiler.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/retries.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/examples/transform_and_produce/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/examples/transform_and_produce/batched.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/examples/transform_and_produce/script.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/examples/transform_and_produce/simple.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/requirements.txt +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/sentry_arroyo.egg-info/SOURCES.txt +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/sentry_arroyo.egg-info/dependency_links.txt +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/sentry_arroyo.egg-info/not-zip-safe +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/sentry_arroyo.egg-info/requires.txt +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/sentry_arroyo.egg-info/top_level.txt +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/setup.cfg +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/mixins.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/test_commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/test_kafka.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/test_local.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_all.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_batching.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_buffer.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_filter.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_guard.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_noop.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_produce.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_reduce.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_run_task.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_run_task_in_threads.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_run_task_with_multiprocessing.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/strategies/test_unfold.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/processing/test_processor.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/test_commit.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/test_dlq.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/test_kip848_e2e.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/test_types.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/utils/__init__.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/utils/test_concurrent.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/utils/test_metrics.py +0 -0
- {sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/utils/test_retries.py +0 -0
{sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/configuration.py

@@ -1,4 +1,5 @@
 import copy
+import functools
 import json
 import logging
 from typing import Any, Dict, Mapping, Optional, Sequence

@@ -49,26 +50,32 @@ def stats_callback(stats_json: str) -> None:
     )


-def producer_stats_callback(stats_json: str) -> None:
+def producer_stats_callback(stats_json: str, producer_name: Optional[str]) -> None:
     stats = json.loads(stats_json)
     metrics = get_metrics()

     # Extract broker-level int_latency metrics
     brokers = stats.get("brokers", {})
+    producer_name_tag = producer_name or "unknown"
+
+    # Record all broker-level metrics in a single loop
     for broker_id, broker_stats in brokers.items():
+        broker_tags = {"broker_id": str(broker_id), "producer_name": producer_name_tag}
+
+        # Record broker latency metrics
         int_latency = broker_stats.get("int_latency", {})
         if int_latency:
             p99_latency_ms = int_latency.get("p99", 0) / 1000.0
             metrics.timing(
                 "arroyo.producer.librdkafka.p99_int_latency",
                 p99_latency_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
             )
             avg_latency_ms = int_latency.get("avg", 0) / 1000.0
             metrics.timing(
                 "arroyo.producer.librdkafka.avg_int_latency",
                 avg_latency_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
             )

         outbuf_latency = broker_stats.get("outbuf_latency", {})

@@ -77,13 +84,13 @@ def producer_stats_callback(stats_json: str) -> None:
             metrics.timing(
                 "arroyo.producer.librdkafka.p99_outbuf_latency",
                 p99_latency_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
             )
             avg_latency_ms = outbuf_latency.get("avg", 0) / 1000.0
             metrics.timing(
                 "arroyo.producer.librdkafka.avg_outbuf_latency",
                 avg_latency_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
             )

         rtt = broker_stats.get("rtt", {})

@@ -92,15 +99,96 @@ def producer_stats_callback(stats_json: str) -> None:
             metrics.timing(
                 "arroyo.producer.librdkafka.p99_rtt",
                 p99_rtt_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
             )
             avg_rtt_ms = rtt.get("avg", 0) / 1000.0
             metrics.timing(
                 "arroyo.producer.librdkafka.avg_rtt",
                 avg_rtt_ms,
-                tags={"broker_id": str(broker_id)},
+                tags=broker_tags,
+            )
+
+        # Record broker transmission metrics
+        if broker_stats.get("tx"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_tx",
+                broker_stats["tx"],
+                tags=broker_tags,
+            )
+
+        if broker_stats.get("txbytes"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_txbytes",
+                broker_stats["txbytes"],
+                tags=broker_tags,
+            )
+
+        # Record broker buffer metrics
+        if broker_stats.get("outbuf_cnt"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_outbuf_requests",
+                broker_stats["outbuf_cnt"],
+                tags=broker_tags,
+            )
+
+        if broker_stats.get("outbuf_msg_cnt"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_outbuf_messages",
+                broker_stats["outbuf_msg_cnt"],
+                tags=broker_tags,
             )

+        # Record broker connection metrics
+        if broker_stats.get("connects"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_connects",
+                broker_stats["connects"],
+                tags=broker_tags,
+            )
+
+        if broker_stats.get("disconnects"):
+            metrics.gauge(
+                "arroyo.producer.librdkafka.broker_disconnects",
+                broker_stats["disconnects"],
+                tags=broker_tags,
+            )
+
+    # Record global producer metrics (librdkafka namespace)
+    if stats.get("msg_cnt"):
+        metrics.gauge(
+            "arroyo.producer.librdkafka.message_count",
+            stats["msg_cnt"],
+            tags={"producer_name": producer_name_tag},
+        )
+
+    if stats.get("msg_max"):
+        metrics.gauge(
+            "arroyo.producer.librdkafka.message_count_max",
+            stats["msg_max"],
+            tags={"producer_name": producer_name_tag},
+        )
+
+    if stats.get("msg_size"):
+        metrics.gauge(
+            "arroyo.producer.librdkafka.message_size",
+            stats["msg_size"],
+            tags={"producer_name": producer_name_tag},
+        )
+
+    if stats.get("msg_size_max"):
+        metrics.gauge(
+            "arroyo.producer.librdkafka.message_size_max",
+            stats["msg_size_max"],
+            tags={"producer_name": producer_name_tag},
+        )
+
+    if stats.get("txmsgs"):
+        metrics.gauge(
+            "arroyo.producer.librdkafka.txmsgs",
+            stats["txmsgs"],
+            tags={"producer_name": producer_name_tag},
+        )
+

 def build_kafka_producer_configuration(
     default_config: Mapping[str, Any],

@@ -111,10 +199,15 @@ def build_kafka_producer_configuration(
         default_config, bootstrap_servers, override_params
     )

+    # Extract client.id to use as producer name for metrics
+    producer_name = broker_config.get("client.id")
+
     broker_config.update(
         {
             "statistics.interval.ms": STATS_COLLECTION_FREQ_MS,
-            "stats_cb": producer_stats_callback,
+            "stats_cb": functools.partial(
+                producer_stats_callback, producer_name=producer_name
+            ),
         }
     )
     return broker_config
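The net effect of these changes is that the producer's `client.id`, when present in the configuration, is bound into the librdkafka stats callback and surfaces as a `producer_name` tag on every producer metric. A minimal sketch of how a caller might opt in, assuming only the `default_config` parameter shown in the hunk above (the remaining arguments of `build_kafka_producer_configuration` are assumed optional and omitted); the broker address and client id are hypothetical:

```python
import functools

from arroyo.backends.kafka.configuration import build_kafka_producer_configuration

# Hypothetical values; "client.id" is the key the new code reads as the producer name.
config = build_kafka_producer_configuration(
    default_config={
        "bootstrap.servers": "127.0.0.1:9092",
        "client.id": "my-producer",
    },
)

# Per the hunks above, the returned config carries "statistics.interval.ms" and a
# "stats_cb" bound via functools.partial(producer_stats_callback, producer_name="my-producer"),
# so every librdkafka stats tick is tagged with producer_name="my-producer".
assert isinstance(config["stats_cb"], functools.partial)
```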
{sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/backends/kafka/consumer.py

@@ -654,7 +654,6 @@ class KafkaProducer(Producer[KafkaPayload]):
         self, configuration: Mapping[str, Any], use_simple_futures: bool = False
     ) -> None:
         self.__configuration = configuration
-
         self.__producer = ConfluentProducer(configuration)
         self.__shutdown_requested = Event()

{sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/metricDefs.json

@@ -1 +1 @@
The generated metric-definitions JSON (a single line in both versions) is regenerated: every existing entry is unchanged, and eleven new Gauge definitions are appended, mirroring the comments added to arroyo/utils/metric_defs.py below:

- "arroyo.producer.librdkafka.message_count", "message_count_max", "message_size", "message_size_max" and "txmsgs": producer-level message counts and sizes from librdkafka statistics, tagged by producer_name.
- "arroyo.producer.librdkafka.broker_tx", "broker_txbytes", "broker_outbuf_requests", "broker_outbuf_messages", "broker_connects" and "broker_disconnects": per-broker transmission, buffer, and connection gauges from librdkafka statistics, tagged by broker_id and producer_name.
{sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/arroyo/utils/metric_defs.py

@@ -126,4 +126,37 @@ MetricName = Literal[
     "arroyo.producer.librdkafka.avg_rtt",
     # Counter: Number of times the produce strategy failed to produce a message
     "arroyo.producer.produce_status",
+    # Gauge: Producer message count metric from librdkafka statistics
+    # Tagged by producer_name
+    "arroyo.producer.librdkafka.message_count",
+    # Gauge: Maximum producer message count from librdkafka statistics
+    # Tagged by producer_name
+    "arroyo.producer.librdkafka.message_count_max",
+    # Gauge: Producer message size from librdkafka statistics
+    # Tagged by producer_name
+    "arroyo.producer.librdkafka.message_size",
+    # Gauge: Maximum producer message size from librdkafka statistics
+    # Tagged by producer_name
+    "arroyo.producer.librdkafka.message_size_max",
+    # Gauge: Total number of messages transmitted from librdkafka statistics
+    # Tagged by producer_name
+    "arroyo.producer.librdkafka.txmsgs",
+    # Gauge: Total number of transmission requests from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_tx",
+    # Gauge: Total number of bytes transmitted from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_txbytes",
+    # Gauge: Number of requests awaiting transmission to broker from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_outbuf_requests",
+    # Gauge: Number of messages awaiting transmission to broker from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_outbuf_messages",
+    # Gauge: Number of connection attempts to broker from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_connects",
+    # Gauge: Number of disconnections from broker from librdkafka statistics
+    # Tagged by broker_id, producer_name
+    "arroyo.producer.librdkafka.broker_disconnects",
 ]
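Because the new names are added to the `MetricName` `Literal`, they type-check anywhere a metric name is expected. A small sketch, assuming only the `MetricName` alias shown above:

```python
from arroyo.utils.metric_defs import MetricName

# With the entries above in place, the new producer gauges are valid MetricName
# literals and pass a type checker such as mypy; a name outside the Literal would
# be flagged as an error.
broker_metric: MetricName = "arroyo.producer.librdkafka.broker_tx"
producer_metric: MetricName = "arroyo.producer.librdkafka.message_count"
```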
{sentry_arroyo-2.28.4 → sentry_arroyo-2.29.0}/tests/backends/test_kafka_producer.py

@@ -22,28 +22,28 @@ def test_producer_stats_callback_with_both_latencies(
         }
     )

-    producer_stats_callback(stats_json)
+    producer_stats_callback(stats_json, None)

     assert mock_metrics.timing.call_count == 4
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.p99_int_latency",
         2.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.avg_int_latency",
         1.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.p99_outbuf_latency",
         4.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.avg_outbuf_latency",
         2.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )

@@ -54,7 +54,7 @@ def test_producer_stats_callback_no_brokers(mock_get_metrics: mock.Mock) -> None

     stats_json = json.dumps({})

-    producer_stats_callback(stats_json)
+    producer_stats_callback(stats_json, None)

     mock_metrics.timing.assert_not_called()

@@ -68,7 +68,7 @@ def test_producer_stats_callback_empty_broker_stats(

     stats_json = json.dumps({"brokers": {"1": {}}})

-    producer_stats_callback(stats_json)
+    producer_stats_callback(stats_json, None)

     mock_metrics.timing.assert_not_called()

@@ -90,36 +90,36 @@ def test_producer_stats_callback_with_all_metrics(mock_get_metrics: mock.Mock) -
         }
     )

-    producer_stats_callback(stats_json)
+    producer_stats_callback(stats_json, None)

     assert mock_metrics.timing.call_count == 6
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.p99_int_latency",
         2.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.avg_int_latency",
         1.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.p99_outbuf_latency",
         4.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.avg_outbuf_latency",
         2.0,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.p99_rtt",
         1.5,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )
     mock_metrics.timing.assert_any_call(
         "arroyo.producer.librdkafka.avg_rtt",
         0.75,
-        tags={"broker_id": "1"},
+        tags={"broker_id": "1", "producer_name": "unknown"},
     )

All remaining files in the package are unchanged between 2.28.4 and 2.29.0 (the entries listed above with +0 -0).
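Outside the test suite, the callback receives the raw librdkafka statistics blob as a JSON string. A minimal sketch of driving it by hand, mirroring the fixtures above; the field values are made up and only keys the new code reads are included:

```python
import json

from arroyo.backends.kafka.configuration import producer_stats_callback

# Made-up librdkafka-style statistics payload.
stats = {
    "msg_cnt": 10,        # -> arroyo.producer.librdkafka.message_count gauge
    "txmsgs": 7,          # -> arroyo.producer.librdkafka.txmsgs gauge
    "brokers": {
        "1": {
            "tx": 42,                          # -> broker_tx gauge, tagged broker_id="1"
            "rtt": {"p99": 1500, "avg": 750},  # microseconds -> 1.5 ms / 0.75 ms timings
        }
    },
}

# Passing producer_name=None makes every metric carry producer_name="unknown",
# exactly as the tests above assert.
producer_stats_callback(json.dumps(stats), None)
```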