moose-lib 0.6.148.dev3442438466__py3-none-any.whl → 0.6.283__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. moose_lib/__init__.py +34 -3
  2. moose_lib/blocks.py +416 -52
  3. moose_lib/clients/redis_client.py +26 -14
  4. moose_lib/commons.py +37 -30
  5. moose_lib/config/config_file.py +5 -1
  6. moose_lib/config/runtime.py +73 -34
  7. moose_lib/data_models.py +331 -61
  8. moose_lib/dmv2/__init__.py +69 -73
  9. moose_lib/dmv2/_registry.py +2 -1
  10. moose_lib/dmv2/_source_capture.py +37 -0
  11. moose_lib/dmv2/consumption.py +55 -32
  12. moose_lib/dmv2/ingest_api.py +9 -2
  13. moose_lib/dmv2/ingest_pipeline.py +35 -16
  14. moose_lib/dmv2/life_cycle.py +3 -1
  15. moose_lib/dmv2/materialized_view.py +24 -14
  16. moose_lib/dmv2/moose_model.py +165 -0
  17. moose_lib/dmv2/olap_table.py +299 -151
  18. moose_lib/dmv2/registry.py +18 -3
  19. moose_lib/dmv2/sql_resource.py +16 -8
  20. moose_lib/dmv2/stream.py +75 -23
  21. moose_lib/dmv2/types.py +14 -8
  22. moose_lib/dmv2/view.py +13 -6
  23. moose_lib/dmv2/web_app.py +11 -6
  24. moose_lib/dmv2/web_app_helpers.py +5 -1
  25. moose_lib/dmv2/workflow.py +37 -9
  26. moose_lib/internal.py +340 -56
  27. moose_lib/main.py +87 -56
  28. moose_lib/query_builder.py +18 -5
  29. moose_lib/query_param.py +54 -20
  30. moose_lib/secrets.py +122 -0
  31. moose_lib/streaming/streaming_function_runner.py +233 -117
  32. moose_lib/utilities/sql.py +0 -1
  33. {moose_lib-0.6.148.dev3442438466.dist-info → moose_lib-0.6.283.dist-info}/METADATA +18 -1
  34. moose_lib-0.6.283.dist-info/RECORD +63 -0
  35. tests/__init__.py +1 -1
  36. tests/conftest.py +6 -5
  37. tests/test_backward_compatibility.py +85 -0
  38. tests/test_cluster_validation.py +85 -0
  39. tests/test_codec.py +75 -0
  40. tests/test_column_formatting.py +80 -0
  41. tests/test_fixedstring.py +43 -0
  42. tests/test_iceberg_config.py +105 -0
  43. tests/test_int_types.py +211 -0
  44. tests/test_kafka_config.py +141 -0
  45. tests/test_materialized.py +74 -0
  46. tests/test_metadata.py +37 -0
  47. tests/test_moose.py +21 -30
  48. tests/test_moose_model.py +153 -0
  49. tests/test_olap_table_moosemodel.py +89 -0
  50. tests/test_olap_table_versioning.py +52 -58
  51. tests/test_query_builder.py +97 -9
  52. tests/test_redis_client.py +10 -3
  53. tests/test_s3queue_config.py +211 -110
  54. tests/test_secrets.py +239 -0
  55. tests/test_simple_aggregate.py +42 -40
  56. tests/test_web_app.py +11 -5
  57. moose_lib-0.6.148.dev3442438466.dist-info/RECORD +0 -47
  58. {moose_lib-0.6.148.dev3442438466.dist-info → moose_lib-0.6.283.dist-info}/WHEEL +0 -0
  59. {moose_lib-0.6.148.dev3442438466.dist-info → moose_lib-0.6.283.dist-info}/top_level.txt +0 -0
moose_lib/clients/redis_client.py CHANGED
@@ -120,13 +120,13 @@ class MooseCache:
 
  # Use provided TTL or default to 1 hour
  ttl = ttl_seconds if ttl_seconds is not None else 3600
-
+
  # Store the value and its type metadata
  pipe = self._client.pipeline()
  pipe.setex(prefixed_key, ttl, string_value)
  pipe.setex(metadata_key, ttl, value_type)
  pipe.execute()
-
+
  except Exception as e:
  print(f"Error setting cache key {key}: {e}")
  raise
@@ -174,13 +174,13 @@ class MooseCache:
  self._ensure_connected()
  prefixed_key = self._get_prefixed_key(key)
  metadata_key = f"{prefixed_key}:__type__"
-
+
  # Get both the value and metadata in a single pipeline call
  pipe = self._client.pipeline()
  pipe.get(prefixed_key)
  pipe.get(metadata_key)
  results = pipe.execute()
-
+
  value, stored_type = results[0], results[1]
 
  if value is None:
@@ -193,33 +193,45 @@ class MooseCache:
  return value
  elif type_hint is list:
  # Type mismatch: stored as string but requested as list
- raise ValueError(f"Value was stored as string but requested as list")
+ raise ValueError(
+ f"Value was stored as string but requested as list"
+ )
  else:
- raise ValueError(f"Value was stored as string but requested as {type_hint.__name__}")
-
+ raise ValueError(
+ f"Value was stored as string but requested as {type_hint.__name__}"
+ )
+
  elif stored_type == "list":
  parsed_value = json.loads(value)
  if type_hint is list:
  return parsed_value
  elif type_hint is str:
  # Type mismatch: stored as list but requested as string
- raise ValueError(f"Value was stored as list but requested as string")
+ raise ValueError(
+ f"Value was stored as list but requested as string"
+ )
  else:
- raise ValueError(f"Value was stored as list but requested as {type_hint.__name__}")
-
+ raise ValueError(
+ f"Value was stored as list but requested as {type_hint.__name__}"
+ )
+
  elif stored_type.startswith("pydantic:"):
  parsed_value = json.loads(value)
  if isinstance(type_hint, type) and issubclass(type_hint, BaseModel):
  return type_hint.model_validate(parsed_value)
  elif type_hint is str:
  # Type mismatch: stored as Pydantic but requested as string
- raise ValueError(f"Value was stored as Pydantic model but requested as string")
+ raise ValueError(
+ f"Value was stored as Pydantic model but requested as string"
+ )
  elif type_hint is list:
  # Type mismatch: stored as Pydantic but requested as list
- raise ValueError(f"Value was stored as Pydantic model but requested as list")
+ raise ValueError(
+ f"Value was stored as Pydantic model but requested as list"
+ )
  else:
  return type_hint.model_validate(parsed_value)
-
+
  # Backwards compatibility: no metadata found, use legacy behavior
  # But remove the problematic auto-detection for strings
  if type_hint is str:
@@ -256,7 +268,7 @@ class MooseCache:
  self._ensure_connected()
  prefixed_key = self._get_prefixed_key(key)
  metadata_key = f"{prefixed_key}:__type__"
-
+
  # Delete both the value and its metadata
  pipe = self._client.pipeline()
  pipe.delete(prefixed_key)
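Note: the MooseCache hunks above store a companion "<key>:__type__" metadata entry next to each cached value and validate the caller's type_hint against it on read, instead of auto-detecting types. A minimal usage sketch of that behavior, assuming the import path from the file list, a no-argument constructor, and set/get signatures matching the ttl_seconds and type_hint parameters visible in the diff:

    from pydantic import BaseModel
    from moose_lib.clients.redis_client import MooseCache  # import path assumed

    class UserProfile(BaseModel):
        id: int
        name: str

    cache = MooseCache()  # constructor arguments assumed to default

    # Writes the serialized value plus a "<prefixed key>:__type__" metadata entry with a 10-minute TTL.
    cache.set("profile:42", UserProfile(id=42, name="Ada"), ttl_seconds=600)

    # Matching hint: the stored "pydantic:..." metadata lets get() rebuild the model.
    profile = cache.get("profile:42", UserProfile)

    # Mismatched hint: raises ValueError("Value was stored as Pydantic model but requested as string").
    # cache.get("profile:42", str)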
moose_lib/commons.py CHANGED
@@ -15,8 +15,12 @@ class CliLogData:
  ERROR = "Error"
  HIGHLIGHT = "Highlight"
 
- def __init__(self, action: str, message: str,
- message_type: Optional[Literal[INFO, SUCCESS, ERROR, HIGHLIGHT]] = INFO):
+ def __init__(
+ self,
+ action: str,
+ message: str,
+ message_type: Optional[Literal[INFO, SUCCESS, ERROR, HIGHLIGHT]] = INFO,
+ ):
  self.message_type = message_type
  self.action = action
  self.message = message
@@ -33,7 +37,7 @@ def cli_log(log: CliLogData) -> None:
  # tries to send logs when moose hasn't fully started, the requests will fail.
  # The try catch is to ignore those errors.
  url = f"http://localhost:{moose_management_port}/logs"
- headers = {'Content-Type': 'application/json'}
+ headers = {"Content-Type": "application/json"}
  requests.post(url, data=json.dumps(log.__dict__), headers=headers)
  except:
  pass
@@ -60,7 +64,11 @@ class Logger:
  elif message_type == CliLogData.HIGHLIGHT:
  moose_scripts_logger.warning(message)
  else:
- cli_log(CliLogData(action=self.action, message=message, message_type=message_type))
+ cli_log(
+ CliLogData(
+ action=self.action, message=message, message_type=message_type
+ )
+ )
 
  def info(self, message: str) -> None:
  self._log(message, CliLogData.INFO)
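A brief usage sketch of the logging helpers reformatted above, assuming CliLogData and cli_log are importable from moose_lib.commons; the action and message values are placeholders:

    from moose_lib.commons import CliLogData, cli_log

    # Posts the log entry as JSON to the local moose management endpoint;
    # failures are silently ignored if moose has not fully started yet.
    cli_log(
        CliLogData(
            action="Ingest",
            message="Processed 120 records",
            message_type=CliLogData.SUCCESS,
        )
    )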
@@ -104,11 +112,11 @@ class EnhancedJSONEncoder(json.JSONEncoder):
 
 
  def _build_kafka_kwargs(
- broker: Union[str, list[str]],
- sasl_username: Optional[str] = None,
- sasl_password: Optional[str] = None,
- sasl_mechanism: Optional[str] = None,
- security_protocol: Optional[str] = None,
+ broker: Union[str, list[str]],
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
  ) -> dict[str, Any]:
  """Builds common Kafka client kwargs from provided parameters."""
  kwargs: dict[str, Any] = {
@@ -126,16 +134,16 @@ def _build_kafka_kwargs(
 
 
  def get_kafka_consumer(
- *,
- broker: Union[str, list[str]],
- client_id: str,
- group_id: str,
- sasl_username: Optional[str] = None,
- sasl_password: Optional[str] = None,
- sasl_mechanism: Optional[str] = None,
- security_protocol: Optional[str] = None,
- value_deserializer=lambda m: json.loads(m.decode("utf-8")),
- **extra_kwargs: Any,
+ *,
+ broker: Union[str, list[str]],
+ client_id: str,
+ group_id: str,
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
+ value_deserializer=lambda m: json.loads(m.decode("utf-8")),
+ **extra_kwargs: Any,
  ) -> KafkaConsumer:
  """Creates a configured KafkaConsumer with optional SASL/security settings."""
  kwargs = _build_kafka_kwargs(
@@ -155,18 +163,17 @@ def get_kafka_consumer(
 
 
  def get_kafka_producer(
- *,
- broker: Union[str, list[str]],
- sasl_username: Optional[str] = None,
- sasl_password: Optional[str] = None,
- sasl_mechanism: Optional[str] = None,
- security_protocol: Optional[str] = None,
- max_request_size: Optional[int] = None,
- value_serializer: Optional[Callable[[Any], bytes]] = None,
- **extra_kwargs: Any,
+ *,
+ broker: Union[str, list[str]],
+ sasl_username: Optional[str] = None,
+ sasl_password: Optional[str] = None,
+ sasl_mechanism: Optional[str] = None,
+ security_protocol: Optional[str] = None,
+ max_request_size: Optional[int] = None,
+ value_serializer: Optional[Callable[[Any], bytes]] = None,
+ **extra_kwargs: Any,
  ) -> KafkaProducer:
- """Creates a configured KafkaProducer with optional SASL/security settings.
- """
+ """Creates a configured KafkaProducer with optional SASL/security settings."""
  kwargs = _build_kafka_kwargs(
  broker,
  sasl_username=sasl_username,
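The reflowed signatures above also show that both Kafka helpers are keyword-only. A minimal call sketch, assuming the functions are importable from moose_lib.commons and using a placeholder local broker without SASL:

    from moose_lib.commons import get_kafka_consumer, get_kafka_producer

    consumer = get_kafka_consumer(
        broker="localhost:19092",    # placeholder broker address
        client_id="example-client",
        group_id="example-group",
        # SASL/security parameters are optional and omitted here
    )

    producer = get_kafka_producer(
        broker="localhost:19092",
        max_request_size=1_048_576,  # optional, per the signature above
    )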
moose_lib/config/config_file.py CHANGED
@@ -4,6 +4,7 @@ Configuration file handling for Moose.
  This module provides functionality for reading and parsing the moose.config.toml file,
  which contains project-wide configuration settings.
  """
+
  import os
  import tomllib
  from dataclasses import dataclass
@@ -13,6 +14,7 @@ from typing import Optional
  @dataclass
  class ClickHouseConfig:
  """ClickHouse configuration settings from moose.config.toml."""
+
  host: str
  host_port: int
  user: str
@@ -25,6 +27,7 @@ class ClickHouseConfig:
  @dataclass
  class KafkaConfig:
  """Redpanda/Kafka configuration settings from moose.config.toml."""
+
  broker: str
  message_timeout_ms: int
  sasl_username: Optional[str] = None
@@ -38,6 +41,7 @@ class KafkaConfig:
  @dataclass
  class ProjectConfig:
  """Project configuration from moose.config.toml."""
+
  language: str
  clickhouse_config: ClickHouseConfig
  kafka_config: Optional[KafkaConfig] = None
@@ -89,7 +93,7 @@ def read_project_config() -> ProjectConfig:
  password=config_data["clickhouse_config"]["password"],
  db_name=config_data["clickhouse_config"]["db_name"],
  use_ssl=config_data["clickhouse_config"].get("use_ssl", False),
- native_port=config_data["clickhouse_config"].get("native_port")
+ native_port=config_data["clickhouse_config"].get("native_port"),
  )
 
  def _parse_kafka(section_name: str) -> Optional[KafkaConfig]:
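For orientation, the dataclasses above are what read_project_config() returns after parsing moose.config.toml. A small illustrative sketch, assuming the import path moose_lib.config.config_file and that a moose.config.toml is present where the function expects it:

    from moose_lib.config.config_file import read_project_config

    config = read_project_config()  # parses moose.config.toml

    print(config.language)
    print(config.clickhouse_config.host, config.clickhouse_config.host_port)

    # kafka_config is Optional, so guard before reading it
    if config.kafka_config is not None:
        print(config.kafka_config.broker)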
moose_lib/config/runtime.py CHANGED
@@ -4,6 +4,7 @@ Runtime configuration management for Moose.
  This module provides a singleton registry for managing runtime configuration settings,
  particularly for ClickHouse connections.
  """
+
  import os
  from dataclasses import dataclass
  from typing import Optional
@@ -12,6 +13,7 @@ from typing import Optional
  @dataclass
  class RuntimeClickHouseConfig:
  """Runtime ClickHouse configuration settings."""
+
  host: str
  port: str
  username: str
@@ -23,6 +25,7 @@ class RuntimeClickHouseConfig:
  @dataclass
  class RuntimeKafkaConfig:
  """Runtime Kafka configuration settings."""
+
  broker: str
  message_timeout_ms: int
  sasl_username: Optional[str]
@@ -39,12 +42,13 @@ class ConfigurationRegistry:
  This class provides a centralized way to manage and access runtime configuration
  settings, with fallback to file-based configuration when runtime settings are not set.
  """
- _instance: Optional['ConfigurationRegistry'] = None
+
+ _instance: Optional["ConfigurationRegistry"] = None
  _clickhouse_config: Optional[RuntimeClickHouseConfig] = None
  _kafka_config: Optional[RuntimeKafkaConfig] = None
 
  @classmethod
- def get_instance(cls) -> 'ConfigurationRegistry':
+ def get_instance(cls) -> "ConfigurationRegistry":
  """Get the singleton instance of ConfigurationRegistry.
 
  Returns:
@@ -62,7 +66,7 @@ class ConfigurationRegistry:
  """
  self._clickhouse_config = config
 
- def set_kafka_config(self, config: 'RuntimeKafkaConfig') -> None:
+ def set_kafka_config(self, config: "RuntimeKafkaConfig") -> None:
  """Set the runtime Kafka configuration.
 
  Args:
@@ -117,12 +121,16 @@ class ConfigurationRegistry:
  username=env_user or config.clickhouse_config.user,
  password=env_password or config.clickhouse_config.password,
  database=env_db or config.clickhouse_config.db_name,
- use_ssl=(env_use_ssl if env_use_ssl is not None else config.clickhouse_config.use_ssl),
+ use_ssl=(
+ env_use_ssl
+ if env_use_ssl is not None
+ else config.clickhouse_config.use_ssl
+ ),
  )
  except Exception as e:
  raise RuntimeError(f"Failed to get ClickHouse configuration: {e}")
 
- def get_kafka_config(self) -> 'RuntimeKafkaConfig':
+ def get_kafka_config(self) -> "RuntimeKafkaConfig":
  """Get the current Kafka configuration.
 
  If runtime configuration is not set, falls back to reading from moose.config.toml
@@ -147,22 +155,30 @@ class ConfigurationRegistry:
  config = read_project_config()
 
  # Prefer Redpanda-prefixed env vars; fallback to Kafka-prefixed
- broker = _env("MOOSE_REDPANDA_CONFIG__BROKER") or \
- _env("MOOSE_KAFKA_CONFIG__BROKER")
- message_timeout_ms = _env("MOOSE_REDPANDA_CONFIG__MESSAGE_TIMEOUT_MS") or \
- _env("MOOSE_KAFKA_CONFIG__MESSAGE_TIMEOUT_MS")
- sasl_username = _env("MOOSE_REDPANDA_CONFIG__SASL_USERNAME") or \
- _env("MOOSE_KAFKA_CONFIG__SASL_USERNAME")
- sasl_password = _env("MOOSE_REDPANDA_CONFIG__SASL_PASSWORD") or \
- _env("MOOSE_KAFKA_CONFIG__SASL_PASSWORD")
- sasl_mechanism = _env("MOOSE_REDPANDA_CONFIG__SASL_MECHANISM") or \
- _env("MOOSE_KAFKA_CONFIG__SASL_MECHANISM")
- security_protocol = _env("MOOSE_REDPANDA_CONFIG__SECURITY_PROTOCOL") or \
- _env("MOOSE_KAFKA_CONFIG__SECURITY_PROTOCOL")
- namespace = _env("MOOSE_REDPANDA_CONFIG__NAMESPACE") or \
- _env("MOOSE_KAFKA_CONFIG__NAMESPACE")
- schema_registry_url = _env("MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL") or \
- _env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL")
+ broker = _env("MOOSE_REDPANDA_CONFIG__BROKER") or _env(
+ "MOOSE_KAFKA_CONFIG__BROKER"
+ )
+ message_timeout_ms = _env(
+ "MOOSE_REDPANDA_CONFIG__MESSAGE_TIMEOUT_MS"
+ ) or _env("MOOSE_KAFKA_CONFIG__MESSAGE_TIMEOUT_MS")
+ sasl_username = _env("MOOSE_REDPANDA_CONFIG__SASL_USERNAME") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_USERNAME"
+ )
+ sasl_password = _env("MOOSE_REDPANDA_CONFIG__SASL_PASSWORD") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_PASSWORD"
+ )
+ sasl_mechanism = _env("MOOSE_REDPANDA_CONFIG__SASL_MECHANISM") or _env(
+ "MOOSE_KAFKA_CONFIG__SASL_MECHANISM"
+ )
+ security_protocol = _env(
+ "MOOSE_REDPANDA_CONFIG__SECURITY_PROTOCOL"
+ ) or _env("MOOSE_KAFKA_CONFIG__SECURITY_PROTOCOL")
+ namespace = _env("MOOSE_REDPANDA_CONFIG__NAMESPACE") or _env(
+ "MOOSE_KAFKA_CONFIG__NAMESPACE"
+ )
+ schema_registry_url = _env(
+ "MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL"
+ ) or _env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL")
 
  file_kafka = config.kafka_config
 
@@ -173,19 +189,42 @@
  return fallback
 
  return RuntimeKafkaConfig(
- broker=broker or (file_kafka.broker if file_kafka else "localhost:19092"),
- message_timeout_ms=_to_int(message_timeout_ms, file_kafka.message_timeout_ms if file_kafka else 1000),
- sasl_username=sasl_username if sasl_username is not None else (
- file_kafka.sasl_username if file_kafka else None),
- sasl_password=sasl_password if sasl_password is not None else (
- file_kafka.sasl_password if file_kafka else None),
- sasl_mechanism=sasl_mechanism if sasl_mechanism is not None else (
- file_kafka.sasl_mechanism if file_kafka else None),
- security_protocol=security_protocol if security_protocol is not None else (
- file_kafka.security_protocol if file_kafka else None),
- namespace=namespace if namespace is not None else (file_kafka.namespace if file_kafka else None),
- schema_registry_url=schema_registry_url if schema_registry_url is not None else (
- file_kafka.schema_registry_url if file_kafka else None),
+ broker=broker
+ or (file_kafka.broker if file_kafka else "localhost:19092"),
+ message_timeout_ms=_to_int(
+ message_timeout_ms,
+ file_kafka.message_timeout_ms if file_kafka else 1000,
+ ),
+ sasl_username=(
+ sasl_username
+ if sasl_username is not None
+ else (file_kafka.sasl_username if file_kafka else None)
+ ),
+ sasl_password=(
+ sasl_password
+ if sasl_password is not None
+ else (file_kafka.sasl_password if file_kafka else None)
+ ),
+ sasl_mechanism=(
+ sasl_mechanism
+ if sasl_mechanism is not None
+ else (file_kafka.sasl_mechanism if file_kafka else None)
+ ),
+ security_protocol=(
+ security_protocol
+ if security_protocol is not None
+ else (file_kafka.security_protocol if file_kafka else None)
+ ),
+ namespace=(
+ namespace
+ if namespace is not None
+ else (file_kafka.namespace if file_kafka else None)
+ ),
+ schema_registry_url=(
+ schema_registry_url
+ if schema_registry_url is not None
+ else (file_kafka.schema_registry_url if file_kafka else None)
+ ),
  )
  except Exception as e:
  raise RuntimeError(f"Failed to get Kafka configuration: {e}")