moose-lib 0.6.90__py3-none-any.whl → 0.6.283__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. moose_lib/__init__.py +38 -3
  2. moose_lib/blocks.py +497 -37
  3. moose_lib/clients/redis_client.py +26 -14
  4. moose_lib/commons.py +94 -5
  5. moose_lib/config/config_file.py +44 -2
  6. moose_lib/config/runtime.py +137 -5
  7. moose_lib/data_models.py +451 -46
  8. moose_lib/dmv2/__init__.py +88 -60
  9. moose_lib/dmv2/_registry.py +3 -1
  10. moose_lib/dmv2/_source_capture.py +37 -0
  11. moose_lib/dmv2/consumption.py +55 -32
  12. moose_lib/dmv2/ingest_api.py +9 -2
  13. moose_lib/dmv2/ingest_pipeline.py +56 -13
  14. moose_lib/dmv2/life_cycle.py +3 -1
  15. moose_lib/dmv2/materialized_view.py +24 -14
  16. moose_lib/dmv2/moose_model.py +165 -0
  17. moose_lib/dmv2/olap_table.py +304 -119
  18. moose_lib/dmv2/registry.py +28 -3
  19. moose_lib/dmv2/sql_resource.py +16 -8
  20. moose_lib/dmv2/stream.py +241 -21
  21. moose_lib/dmv2/types.py +14 -8
  22. moose_lib/dmv2/view.py +13 -6
  23. moose_lib/dmv2/web_app.py +175 -0
  24. moose_lib/dmv2/web_app_helpers.py +96 -0
  25. moose_lib/dmv2/workflow.py +37 -9
  26. moose_lib/internal.py +537 -68
  27. moose_lib/main.py +87 -56
  28. moose_lib/query_builder.py +18 -5
  29. moose_lib/query_param.py +54 -20
  30. moose_lib/secrets.py +122 -0
  31. moose_lib/streaming/streaming_function_runner.py +266 -156
  32. moose_lib/utilities/sql.py +0 -1
  33. {moose_lib-0.6.90.dist-info → moose_lib-0.6.283.dist-info}/METADATA +19 -1
  34. moose_lib-0.6.283.dist-info/RECORD +63 -0
  35. tests/__init__.py +1 -1
  36. tests/conftest.py +38 -1
  37. tests/test_backward_compatibility.py +85 -0
  38. tests/test_cluster_validation.py +85 -0
  39. tests/test_codec.py +75 -0
  40. tests/test_column_formatting.py +80 -0
  41. tests/test_fixedstring.py +43 -0
  42. tests/test_iceberg_config.py +105 -0
  43. tests/test_int_types.py +211 -0
  44. tests/test_kafka_config.py +141 -0
  45. tests/test_materialized.py +74 -0
  46. tests/test_metadata.py +37 -0
  47. tests/test_moose.py +21 -30
  48. tests/test_moose_model.py +153 -0
  49. tests/test_olap_table_moosemodel.py +89 -0
  50. tests/test_olap_table_versioning.py +210 -0
  51. tests/test_query_builder.py +97 -9
  52. tests/test_redis_client.py +10 -3
  53. tests/test_s3queue_config.py +211 -110
  54. tests/test_secrets.py +239 -0
  55. tests/test_simple_aggregate.py +114 -0
  56. tests/test_web_app.py +227 -0
  57. moose_lib-0.6.90.dist-info/RECORD +0 -42
  58. {moose_lib-0.6.90.dist-info → moose_lib-0.6.283.dist-info}/WHEEL +0 -0
  59. {moose_lib-0.6.90.dist-info → moose_lib-0.6.283.dist-info}/top_level.txt +0 -0
moose_lib/clients/redis_client.py CHANGED
@@ -120,13 +120,13 @@ class MooseCache:
 
  # Use provided TTL or default to 1 hour
  ttl = ttl_seconds if ttl_seconds is not None else 3600
-
+
  # Store the value and its type metadata
  pipe = self._client.pipeline()
  pipe.setex(prefixed_key, ttl, string_value)
  pipe.setex(metadata_key, ttl, value_type)
  pipe.execute()
-
+
  except Exception as e:
  print(f"Error setting cache key {key}: {e}")
  raise
@@ -174,13 +174,13 @@ class MooseCache:
  self._ensure_connected()
  prefixed_key = self._get_prefixed_key(key)
  metadata_key = f"{prefixed_key}:__type__"
-
+
  # Get both the value and metadata in a single pipeline call
  pipe = self._client.pipeline()
  pipe.get(prefixed_key)
  pipe.get(metadata_key)
  results = pipe.execute()
-
+
  value, stored_type = results[0], results[1]
 
  if value is None:
@@ -193,33 +193,45 @@ class MooseCache:
  return value
  elif type_hint is list:
  # Type mismatch: stored as string but requested as list
- raise ValueError(f"Value was stored as string but requested as list")
+ raise ValueError(
+ f"Value was stored as string but requested as list"
+ )
  else:
- raise ValueError(f"Value was stored as string but requested as {type_hint.__name__}")
-
+ raise ValueError(
+ f"Value was stored as string but requested as {type_hint.__name__}"
+ )
+
  elif stored_type == "list":
  parsed_value = json.loads(value)
  if type_hint is list:
  return parsed_value
  elif type_hint is str:
  # Type mismatch: stored as list but requested as string
- raise ValueError(f"Value was stored as list but requested as string")
+ raise ValueError(
+ f"Value was stored as list but requested as string"
+ )
  else:
- raise ValueError(f"Value was stored as list but requested as {type_hint.__name__}")
-
+ raise ValueError(
+ f"Value was stored as list but requested as {type_hint.__name__}"
+ )
+
  elif stored_type.startswith("pydantic:"):
  parsed_value = json.loads(value)
  if isinstance(type_hint, type) and issubclass(type_hint, BaseModel):
  return type_hint.model_validate(parsed_value)
  elif type_hint is str:
  # Type mismatch: stored as Pydantic but requested as string
- raise ValueError(f"Value was stored as Pydantic model but requested as string")
+ raise ValueError(
+ f"Value was stored as Pydantic model but requested as string"
+ )
  elif type_hint is list:
  # Type mismatch: stored as Pydantic but requested as list
- raise ValueError(f"Value was stored as Pydantic model but requested as list")
+ raise ValueError(
+ f"Value was stored as Pydantic model but requested as list"
+ )
  else:
  return type_hint.model_validate(parsed_value)
-
+
  # Backwards compatibility: no metadata found, use legacy behavior
  # But remove the problematic auto-detection for strings
  if type_hint is str:
@@ -256,7 +268,7 @@ class MooseCache:
  self._ensure_connected()
  prefixed_key = self._get_prefixed_key(key)
  metadata_key = f"{prefixed_key}:__type__"
-
+
  # Delete both the value and its metadata
  pipe = self._client.pipeline()
  pipe.delete(prefixed_key)
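
The hunks above change MooseCache (moose_lib/clients/redis_client.py) to write a companion "<key>:__type__" metadata entry next to each cached value and to validate that metadata against the requested type on reads. A minimal usage sketch follows; the method names, argument order, and the no-argument constructor are assumptions inferred from the bodies shown here, not signatures taken verbatim from the package.

from pydantic import BaseModel
from moose_lib import MooseCache  # import path assumed

class UserProfile(BaseModel):
    name: str
    visits: int

cache = MooseCache()

# Writes store the value plus a "<key>:__type__" entry, both with the same TTL
# (defaulting to one hour when no TTL is given).
cache.set("profile:1", UserProfile(name="Ada", visits=3), ttl_seconds=3600)
cache.set("greeting", "hello")

# Reads check the stored type metadata against the requested type.
profile = cache.get("profile:1", UserProfile)  # re-validated via model_validate
greeting = cache.get("greeting", str)
# cache.get("greeting", list) would raise ValueError: stored as string, requested as list.

# delete removes both the value and its ":__type__" companion key.
cache.delete("profile:1")
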
moose_lib/commons.py CHANGED
@@ -4,8 +4,9 @@ from datetime import datetime, timezone
 
  import requests
  import json
- from typing import Optional, Literal
+ from typing import Optional, Literal, Any, Union, Callable
  import os
+ from kafka import KafkaConsumer, KafkaProducer
 
 
  class CliLogData:
@@ -14,8 +15,12 @@ class CliLogData:
  ERROR = "Error"
  HIGHLIGHT = "Highlight"
 
- def __init__(self, action: str, message: str,
- message_type: Optional[Literal[INFO, SUCCESS, ERROR, HIGHLIGHT]] = INFO):
+ def __init__(
+ self,
+ action: str,
+ message: str,
+ message_type: Optional[Literal[INFO, SUCCESS, ERROR, HIGHLIGHT]] = INFO,
+ ):
  self.message_type = message_type
  self.action = action
  self.message = message
@@ -32,7 +37,7 @@ def cli_log(log: CliLogData) -> None:
  # tries to send logs when moose hasn't fully started, the requests will fail.
  # The try catch is to ignore those errors.
  url = f"http://localhost:{moose_management_port}/logs"
- headers = {'Content-Type': 'application/json'}
+ headers = {"Content-Type": "application/json"}
  requests.post(url, data=json.dumps(log.__dict__), headers=headers)
  except:
  pass
@@ -59,7 +64,11 @@ class Logger:
  elif message_type == CliLogData.HIGHLIGHT:
  moose_scripts_logger.warning(message)
  else:
- cli_log(CliLogData(action=self.action, message=message, message_type=message_type))
+ cli_log(
+ CliLogData(
+ action=self.action, message=message, message_type=message_type
+ )
+ )
 
  def info(self, message: str) -> None:
  self._log(message, CliLogData.INFO)
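
These commons.py hunks are mostly formatter-driven reflows, but they also show the structured logging path: a CliLogData record is serialized from its __dict__ and POSTed to the local Moose management port, with failures silently ignored while the server is still starting. A small sketch of emitting a log directly through cli_log; the action and message strings are placeholders:

from moose_lib.commons import CliLogData, cli_log

# message_type defaults to CliLogData.INFO; SUCCESS, ERROR, and HIGHLIGHT are also available.
record = CliLogData(
    action="Ingest",
    message="Processed 128 records",
    message_type=CliLogData.SUCCESS,
)

# Serializes record.__dict__ as JSON and POSTs it to
# http://localhost:<management_port>/logs; errors are swallowed by design.
cli_log(record)
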
@@ -100,3 +109,83 @@ class EnhancedJSONEncoder(json.JSONEncoder):
  if dataclasses.is_dataclass(o):
  return dataclasses.asdict(o)
  return super().default(o)
+
+
+ def _build_kafka_kwargs(
+ broker: Union[str, list[str]],
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
+ ) -> dict[str, Any]:
+ """Builds common Kafka client kwargs from provided parameters."""
+ kwargs: dict[str, Any] = {
+ "bootstrap_servers": broker,
+ }
+ if sasl_mechanism:
+ kwargs["sasl_mechanism"] = sasl_mechanism
+ if sasl_username is not None:
+ kwargs["sasl_plain_username"] = sasl_username
+ if sasl_password is not None:
+ kwargs["sasl_plain_password"] = sasl_password
+ if security_protocol is not None:
+ kwargs["security_protocol"] = security_protocol
+ return kwargs
+
+
+ def get_kafka_consumer(
+ *,
+ broker: Union[str, list[str]],
+ client_id: str,
+ group_id: str,
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
+ value_deserializer=lambda m: json.loads(m.decode("utf-8")),
+ **extra_kwargs: Any,
+ ) -> KafkaConsumer:
+ """Creates a configured KafkaConsumer with optional SASL/security settings."""
+ kwargs = _build_kafka_kwargs(
+ broker,
+ sasl_username=sasl_username,
+ sasl_password=sasl_password,
+ sasl_mechanism=sasl_mechanism,
+ security_protocol=security_protocol,
+ )
+ return KafkaConsumer(
+ client_id=client_id,
+ group_id=group_id,
+ value_deserializer=value_deserializer,
+ **kwargs,
+ **extra_kwargs,
+ )
+
+
+ def get_kafka_producer(
+ *,
+ broker: Union[str, list[str]],
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
+ max_request_size: Optional[int] = None,
+ value_serializer: Optional[Callable[[Any], bytes]] = None,
+ **extra_kwargs: Any,
+ ) -> KafkaProducer:
+ """Creates a configured KafkaProducer with optional SASL/security settings."""
+ kwargs = _build_kafka_kwargs(
+ broker,
+ sasl_username=sasl_username,
+ sasl_password=sasl_password,
+ sasl_mechanism=sasl_mechanism,
+ security_protocol=security_protocol,
+ )
+ if max_request_size is not None:
+ kwargs["max_request_size"] = max_request_size
+ kwargs["max_in_flight_requests_per_connection"] = 1
+ if value_serializer is not None:
+ kwargs["value_serializer"] = value_serializer
+ # Allow callers to pass through additional Kafka configs like linger_ms, acks, retries, etc.
+ kwargs.update(extra_kwargs)
+ return KafkaProducer(**kwargs)
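
The new helpers wrap kafka-python's KafkaConsumer and KafkaProducer behind keyword-only factories that share SASL/security handling. A minimal usage sketch based on the signatures added above; the broker address, topic, client, and group names are placeholders:

import json

from moose_lib.commons import get_kafka_consumer, get_kafka_producer

# Producer: serialize dict payloads to JSON bytes; extra kwargs such as acks or
# linger_ms are passed straight through to KafkaProducer.
producer = get_kafka_producer(
    broker="localhost:19092",
    value_serializer=lambda v: json.dumps(v).encode("utf-8"),
    acks="all",
)
producer.send("example_topic", {"id": 1, "status": "ok"})
producer.flush()

# Consumer: the default value_deserializer JSON-decodes each message body.
consumer = get_kafka_consumer(
    broker="localhost:19092",
    client_id="example-client",
    group_id="example-group",
)
consumer.subscribe(["example_topic"])
for message in consumer:
    print(message.value)  # already a decoded Python object
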
moose_lib/config/config_file.py CHANGED
@@ -4,14 +4,17 @@ Configuration file handling for Moose.
  This module provides functionality for reading and parsing the moose.config.toml file,
  which contains project-wide configuration settings.
  """
+
  import os
  import tomllib
  from dataclasses import dataclass
  from typing import Optional
 
+
  @dataclass
  class ClickHouseConfig:
  """ClickHouse configuration settings from moose.config.toml."""
+
  host: str
  host_port: int
  user: str
@@ -20,11 +23,29 @@ class ClickHouseConfig:
  use_ssl: bool = False
  native_port: Optional[int] = None
 
+
+ @dataclass
+ class KafkaConfig:
+ """Redpanda/Kafka configuration settings from moose.config.toml."""
+
+ broker: str
+ message_timeout_ms: int
+ sasl_username: Optional[str] = None
+ sasl_password: Optional[str] = None
+ sasl_mechanism: Optional[str] = None
+ security_protocol: Optional[str] = None
+ namespace: Optional[str] = None
+ schema_registry_url: Optional[str] = None
+
+
  @dataclass
  class ProjectConfig:
  """Project configuration from moose.config.toml."""
+
  language: str
  clickhouse_config: ClickHouseConfig
+ kafka_config: Optional[KafkaConfig] = None
+
 
  def find_config_file(start_dir: str = os.getcwd()) -> Optional[str]:
  """Find moose.config.toml by walking up directory tree.
@@ -48,6 +69,7 @@ def find_config_file(start_dir: str = os.getcwd()) -> Optional[str]:
  current_dir = parent_dir
  return None
 
+
  def read_project_config() -> ProjectConfig:
  """Read and parse moose.config.toml.
 
@@ -71,12 +93,32 @@ def read_project_config() -> ProjectConfig:
  password=config_data["clickhouse_config"]["password"],
  db_name=config_data["clickhouse_config"]["db_name"],
  use_ssl=config_data["clickhouse_config"].get("use_ssl", False),
- native_port=config_data["clickhouse_config"].get("native_port")
+ native_port=config_data["clickhouse_config"].get("native_port"),
  )
 
+ def _parse_kafka(section_name: str) -> Optional[KafkaConfig]:
+ sec = config_data.get(section_name)
+ if sec is None:
+ return None
+ return KafkaConfig(
+ broker=sec["broker"],
+ message_timeout_ms=sec.get("message_timeout_ms", 1000),
+ sasl_username=sec.get("sasl_username"),
+ sasl_password=sec.get("sasl_password"),
+ sasl_mechanism=sec.get("sasl_mechanism"),
+ security_protocol=sec.get("security_protocol"),
+ namespace=sec.get("namespace"),
+ schema_registry_url=sec.get("schema_registry_url"),
+ )
+
+ kafka_cfg = _parse_kafka("kafka_config")
+ if kafka_cfg is None:
+ kafka_cfg = _parse_kafka("redpanda_config")
+
 
  return ProjectConfig(
  language=config_data["language"],
- clickhouse_config=clickhouse_config
+ clickhouse_config=clickhouse_config,
+ kafka_config=kafka_cfg,
  )
  except Exception as e:
  raise RuntimeError(f"Failed to parse moose.config.toml: {e}")
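
read_project_config now looks for a [kafka_config] section first and falls back to [redpanda_config], returning None for kafka_config when neither is present. A short sketch of consuming it; the TOML in the comment is illustrative, not copied from a real project:

from moose_lib.config.config_file import read_project_config

# Illustrative moose.config.toml (either section name is accepted):
#
#   language = "python"
#
#   [clickhouse_config]
#   host = "localhost"
#   host_port = 18123
#   user = "default"
#   password = ""
#   db_name = "local"
#
#   [redpanda_config]          # or [kafka_config]
#   broker = "localhost:19092"
#   message_timeout_ms = 1000

project = read_project_config()
if project.kafka_config is not None:
    print(project.kafka_config.broker, project.kafka_config.message_timeout_ms)
else:
    print("No [kafka_config]/[redpanda_config] section; kafka_config is None")
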
moose_lib/config/runtime.py CHANGED
@@ -4,13 +4,16 @@ Runtime configuration management for Moose.
  This module provides a singleton registry for managing runtime configuration settings,
  particularly for ClickHouse connections.
  """
+
  import os
  from dataclasses import dataclass
  from typing import Optional
 
+
  @dataclass
  class RuntimeClickHouseConfig:
  """Runtime ClickHouse configuration settings."""
+
  host: str
  port: str
  username: str
@@ -18,17 +21,34 @@ class RuntimeClickHouseConfig:
  database: str
  use_ssl: bool
 
+
+ @dataclass
+ class RuntimeKafkaConfig:
+ """Runtime Kafka configuration settings."""
+
+ broker: str
+ message_timeout_ms: int
+ sasl_username: Optional[str]
+ sasl_password: Optional[str]
+ sasl_mechanism: Optional[str]
+ security_protocol: Optional[str]
+ namespace: Optional[str]
+ schema_registry_url: Optional[str]
+
+
  class ConfigurationRegistry:
  """Singleton registry for managing runtime configuration.
 
  This class provides a centralized way to manage and access runtime configuration
  settings, with fallback to file-based configuration when runtime settings are not set.
  """
- _instance: Optional['ConfigurationRegistry'] = None
+
+ _instance: Optional["ConfigurationRegistry"] = None
  _clickhouse_config: Optional[RuntimeClickHouseConfig] = None
+ _kafka_config: Optional[RuntimeKafkaConfig] = None
 
  @classmethod
- def get_instance(cls) -> 'ConfigurationRegistry':
+ def get_instance(cls) -> "ConfigurationRegistry":
  """Get the singleton instance of ConfigurationRegistry.
 
  Returns:
@@ -46,6 +66,14 @@ class ConfigurationRegistry:
  """
  self._clickhouse_config = config
 
+ def set_kafka_config(self, config: "RuntimeKafkaConfig") -> None:
+ """Set the runtime Kafka configuration.
+
+ Args:
+ config: The Kafka configuration to use.
+ """
+ self._kafka_config = config
+
  def get_clickhouse_config(self) -> RuntimeClickHouseConfig:
  """Get the current ClickHouse configuration.
 
@@ -93,18 +121,122 @@ class ConfigurationRegistry:
  username=env_user or config.clickhouse_config.user,
  password=env_password or config.clickhouse_config.password,
  database=env_db or config.clickhouse_config.db_name,
- use_ssl=(env_use_ssl if env_use_ssl is not None else config.clickhouse_config.use_ssl),
+ use_ssl=(
+ env_use_ssl
+ if env_use_ssl is not None
+ else config.clickhouse_config.use_ssl
+ ),
  )
  except Exception as e:
  raise RuntimeError(f"Failed to get ClickHouse configuration: {e}")
 
+ def get_kafka_config(self) -> "RuntimeKafkaConfig":
+ """Get the current Kafka configuration.
+
+ If runtime configuration is not set, falls back to reading from moose.config.toml
+ and environment variables (Redpanda- and Kafka-prefixed).
+
+ Returns:
+ The current Kafka configuration.
+ """
+ if self._kafka_config:
+ return self._kafka_config
+
+ from .config_file import read_project_config
+
+ def _env(name: str) -> Optional[str]:
+ val = os.environ.get(name)
+ if val is None:
+ return None
+ trimmed = val.strip()
+ return trimmed if trimmed else None
+
+ try:
+ config = read_project_config()
+
+ # Prefer Redpanda-prefixed env vars; fallback to Kafka-prefixed
+ broker = _env("MOOSE_REDPANDA_CONFIG__BROKER") or _env(
+ "MOOSE_KAFKA_CONFIG__BROKER"
+ )
+ message_timeout_ms = _env(
+ "MOOSE_REDPANDA_CONFIG__MESSAGE_TIMEOUT_MS"
+ ) or _env("MOOSE_KAFKA_CONFIG__MESSAGE_TIMEOUT_MS")
+ sasl_username = _env("MOOSE_REDPANDA_CONFIG__SASL_USERNAME") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_USERNAME"
+ )
+ sasl_password = _env("MOOSE_REDPANDA_CONFIG__SASL_PASSWORD") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_PASSWORD"
+ )
+ sasl_mechanism = _env("MOOSE_REDPANDA_CONFIG__SASL_MECHANISM") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_MECHANISM"
+ )
+ security_protocol = _env(
+ "MOOSE_REDPANDA_CONFIG__SECURITY_PROTOCOL"
+ ) or _env("MOOSE_KAFKA_CONFIG__SECURITY_PROTOCOL")
+ namespace = _env("MOOSE_REDPANDA_CONFIG__NAMESPACE") or _env(
+ "MOOSE_KAFKA_CONFIG__NAMESPACE"
+ )
+ schema_registry_url = _env(
+ "MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL"
+ ) or _env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL")
+
+ file_kafka = config.kafka_config
+
+ def _to_int(value: Optional[str], fallback: int) -> int:
+ try:
+ return int(value) if value is not None else fallback
+ except Exception:
+ return fallback
+
+ return RuntimeKafkaConfig(
+ broker=broker
+ or (file_kafka.broker if file_kafka else "localhost:19092"),
+ message_timeout_ms=_to_int(
+ message_timeout_ms,
+ file_kafka.message_timeout_ms if file_kafka else 1000,
+ ),
+ sasl_username=(
+ sasl_username
+ if sasl_username is not None
+ else (file_kafka.sasl_username if file_kafka else None)
+ ),
+ sasl_password=(
+ sasl_password
+ if sasl_password is not None
+ else (file_kafka.sasl_password if file_kafka else None)
+ ),
+ sasl_mechanism=(
+ sasl_mechanism
+ if sasl_mechanism is not None
+ else (file_kafka.sasl_mechanism if file_kafka else None)
+ ),
+ security_protocol=(
+ security_protocol
+ if security_protocol is not None
+ else (file_kafka.security_protocol if file_kafka else None)
+ ),
+ namespace=(
+ namespace
+ if namespace is not None
+ else (file_kafka.namespace if file_kafka else None)
+ ),
+ schema_registry_url=(
+ schema_registry_url
+ if schema_registry_url is not None
+ else (file_kafka.schema_registry_url if file_kafka else None)
+ ),
+ )
+ except Exception as e:
+ raise RuntimeError(f"Failed to get Kafka configuration: {e}")
+
  def has_runtime_config(self) -> bool:
  """Check if runtime configuration is set.
 
  Returns:
- True if runtime configuration is set, False otherwise.
+ True if either runtime clickhouse or kafka configuration is set, False otherwise.
  """
- return self._clickhouse_config is not None
+ return self._clickhouse_config is not None or self._kafka_config is not None
+
 
  # Create singleton instance
  config_registry = ConfigurationRegistry.get_instance()
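
As with ClickHouse, Kafka settings can now be injected at runtime or resolved lazily from MOOSE_REDPANDA_CONFIG__*/MOOSE_KAFKA_CONFIG__* environment variables and moose.config.toml (Redpanda-prefixed variables win, per the lookup order above). A minimal sketch using the registry; the broker value is a placeholder:

from moose_lib.config.runtime import (
    ConfigurationRegistry,
    RuntimeKafkaConfig,
    config_registry,
)

# Lazy resolution: env vars first, then moose.config.toml, then defaults
# (broker "localhost:19092", message_timeout_ms 1000).
kafka = config_registry.get_kafka_config()
print(kafka.broker, kafka.message_timeout_ms)

# Explicit runtime injection (useful in tests); all fields are required on the dataclass.
config_registry.set_kafka_config(
    RuntimeKafkaConfig(
        broker="localhost:19092",
        message_timeout_ms=1000,
        sasl_username=None,
        sasl_password=None,
        sasl_mechanism=None,
        security_protocol=None,
        namespace=None,
        schema_registry_url=None,
    )
)
assert config_registry.has_runtime_config()
assert ConfigurationRegistry.get_instance() is config_registry  # same singleton
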