moose-lib 0.4.261__py3-none-any.whl → 0.4.263__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of moose-lib might be problematic.
- moose_lib/__init__.py +1 -1
- moose_lib/commons.py +37 -4
- moose_lib/main.py +207 -16
- moose_lib/streaming/streaming_function_runner.py +1 -28
- {moose_lib-0.4.261.dist-info → moose_lib-0.4.263.dist-info}/METADATA +1 -1
- {moose_lib-0.4.261.dist-info → moose_lib-0.4.263.dist-info}/RECORD +8 -8
- {moose_lib-0.4.261.dist-info → moose_lib-0.4.263.dist-info}/WHEEL +0 -0
- {moose_lib-0.4.261.dist-info → moose_lib-0.4.263.dist-info}/top_level.txt +0 -0
moose_lib/__init__.py
CHANGED
moose_lib/commons.py
CHANGED
@@ -1,15 +1,20 @@
+import dataclasses
 import logging
+from datetime import datetime, timezone
+
 import requests
 import json
 from typing import Optional, Literal

+
 class CliLogData:
     INFO = "Info"
     SUCCESS = "Success"
     ERROR = "Error"
     HIGHLIGHT = "Highlight"

-    def __init__(self, action: str, message: str,
+    def __init__(self, action: str, message: str,
+                 message_type: Optional[Literal[INFO, SUCCESS, ERROR, HIGHLIGHT]] = INFO):
         self.message_type = message_type
         self.action = action
         self.message = message
@@ -31,11 +36,11 @@ def cli_log(log: CliLogData) -> None:

 class Logger:
     default_action = "Custom"
-
+
     def __init__(self, action: Optional[str] = None, is_moose_task: bool = False):
         self.action = action or Logger.default_action
         self._is_moose_task = is_moose_task
-
+
     def _log(self, message: str, message_type: str) -> None:
         if self._is_moose_task:
             # We have a task decorator in the lib that initializes a logger
@@ -62,4 +67,32 @@ class Logger:
         self._log(message, CliLogData.ERROR)

     def highlight(self, message: str) -> None:
-        self._log(message, CliLogData.HIGHLIGHT)
+        self._log(message, CliLogData.HIGHLIGHT)
+
+
+class EnhancedJSONEncoder(json.JSONEncoder):
+    """
+    Custom JSON encoder that handles:
+    - datetime objects (converts to ISO format with timezone)
+    - dataclass instances (converts to dict)
+    - Pydantic models (converts to dict)
+    """
+
+    def default(self, o):
+        if isinstance(o, datetime):
+            if o.tzinfo is None:
+                o = o.replace(tzinfo=timezone.utc)
+            return o.isoformat()
+        if hasattr(o, "model_dump"):  # Handle Pydantic v2 models
+            # Convert to dict and handle datetime fields
+            data = o.model_dump()
+            # Handle any datetime fields that might be present
+            for key, value in data.items():
+                if isinstance(value, datetime):
+                    if value.tzinfo is None:
+                        value = value.replace(tzinfo=timezone.utc)
+                    data[key] = value.isoformat()
+            return data
+        if dataclasses.is_dataclass(o):
+            return dataclasses.asdict(o)
+        return super().default(o)
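The encoder that previously lived in streaming_function_runner.py is now shared from moose_lib.commons (see the main.py and streaming runner diffs below, which import it from there). A minimal usage sketch, assuming moose_lib 0.4.263 is installed; the Ping and Event types are made up for illustration:

# Sketch of how the shared encoder behaves; Ping/Event are illustrative only.
import json
import dataclasses
from datetime import datetime

from pydantic import BaseModel
from moose_lib.commons import EnhancedJSONEncoder


@dataclasses.dataclass
class Ping:
    sent_at: datetime  # naive datetime; the encoder assumes UTC


class Event(BaseModel):
    name: str
    created_at: datetime


payload = {
    "dc": Ping(sent_at=datetime(2024, 1, 1, 12, 0)),
    "model": Event(name="signup", created_at=datetime(2024, 1, 1, 12, 0)),
}

# Naive datetimes are coerced to UTC and rendered as ISO-8601 strings;
# dataclasses and Pydantic models are converted to plain dicts first.
print(json.dumps(payload, cls=EnhancedJSONEncoder, indent=2))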
moose_lib/main.py
CHANGED
@@ -5,13 +5,23 @@ including configuration objects, clients for interacting with services (ClickHou
 and utilities for defining data models and SQL queries.
 """
 from clickhouse_connect.driver.client import Client as ClickhouseClient
-from
+from clickhouse_connect import get_client
+from pydantic import BaseModel
 from dataclasses import dataclass, asdict
 from enum import Enum
-from typing import Any, Callable, Dict, Optional, TypeVar, overload
+from typing import Any, Callable, Dict, Optional, TypeVar, overload, Type, Union
 import sys
 import os
 import json
+import hashlib
+import asyncio
+from string import Formatter
+from temporalio.client import Client as TemporalClient, TLSConfig
+from temporalio.common import RetryPolicy, WorkflowIDConflictPolicy, WorkflowIDReusePolicy
+from datetime import timedelta
+from .config.runtime import RuntimeClickHouseConfig
+
+from moose_lib.commons import EnhancedJSONEncoder


 @dataclass
@@ -24,7 +34,6 @@ class StreamingFunction:
     run: Callable


-
 @dataclass
 class StorageConfig:
     """Configuration related to data storage, typically in an OLAP table.
@@ -53,6 +62,7 @@ class DataModelConfig:

 class CustomEncoder(json.JSONEncoder):
     """Custom JSON encoder that handles Enum types by encoding their values."""
+
     def default(self, obj):
         if isinstance(obj, Enum):
             return obj.value
@@ -92,6 +102,7 @@ def moose_data_model(arg: Any = None) -> Any:
     Returns:
         A decorator function or the decorated class.
     """
+
     def get_file(t: type) -> Optional[str]:
         """Helper to get the file path of a type's definition."""
         module = sys.modules.get(t.__module__)
@@ -139,33 +150,197 @@ class ConsumptionApiResult:
 class QueryClient:
     """Client for executing queries, typically against ClickHouse.

-    (Note: Current implementation is a placeholder.)
-
     Args:
-
+        ch_client_or_config: Either an instance of the ClickHouse client or a RuntimeClickHouseConfig.
     """
-    def __init__(self, ch_client: ClickhouseClient):
-        self.ch_client = ch_client

-    def
-
-
+    def __init__(self, ch_client_or_config: Union[ClickhouseClient, RuntimeClickHouseConfig]):
+        if isinstance(ch_client_or_config, RuntimeClickHouseConfig):
+            # Create ClickHouse client from configuration
+            config = ch_client_or_config
+            interface = 'https' if config.use_ssl else 'http'
+            self.ch_client = get_client(
+                interface=interface,
+                host=config.host,
+                port=int(config.port),
+                username=config.username,
+                password=config.password,
+                database=config.database,
+            )
+        else:
+            # Use provided ClickHouse client directly
+            self.ch_client = ch_client_or_config
+
+    def __call__(self, input, variables):
+        return self.execute(input, variables)
+
+    def execute(self, input, variables, row_type: Type[BaseModel] = None):
+        params = {}
+        values = {}
+
+        for i, (_, variable_name, _, _) in enumerate(Formatter().parse(input)):
+            if variable_name:
+                value = variables[variable_name]
+                if isinstance(value, list) and len(value) == 1:
+                    # handling passing the value of the query string dict directly to variables
+                    value = value[0]
+
+                t = 'String' if isinstance(value, str) else \
+                    'Int64' if isinstance(value, int) else \
+                    'Float64' if isinstance(value, float) else "String"  # unknown type
+
+                params[variable_name] = f'{{p{i}: {t}}}'
+                values[f'p{i}'] = value
+        clickhouse_query = input.format_map(params)
+
+        # We are not using the result of the ping
+        # but this ensures that if the clickhouse cloud service is idle, we
+        # wake it up, before we send the query.
+        self.ch_client.ping()
+
+        val = self.ch_client.query(clickhouse_query, values)
+
+        if row_type is None:
+            return list(val.named_results())
+        else:
+            return list(row_type(**row) for row in val.named_results())
+
+    def close(self):
+        """Close the ClickHouse client connection."""
+        if self.ch_client:
+            try:
+                self.ch_client.close()
+            except Exception as e:
+                print(f"Error closing ClickHouse client: {e}")


 class WorkflowClient:
     """Client for interacting with Temporal workflows.

-    (Note: Current implementation is a placeholder.)
-
     Args:
         temporal_client: An instance of the Temporal client.
     """
+
     def __init__(self, temporal_client: TemporalClient):
         self.temporal_client = temporal_client
+        self.configs = self.load_consolidated_configs()
+        print(f"WorkflowClient - configs: {self.configs}")

+    # Test workflow executor in rust if this changes significantly
     def execute(self, name: str, input_data: Any) -> Dict[str, Any]:
-
-
+        try:
+            workflow_id, run_id = asyncio.run(self._start_workflow_async(name, input_data))
+            print(f"WorkflowClient - started workflow: {name}")
+            return {
+                "status": 200,
+                "body": f"Workflow started: {name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/{workflow_id}/{run_id}/history"
+            }
+        except Exception as e:
+            print(f"WorkflowClient - error while starting workflow: {e}")
+            return {
+                "status": 400,
+                "body": str(e)
+            }
+
+    async def _start_workflow_async(self, name: str, input_data: Any):
+        # Extract configuration based on workflow type
+        config = self._get_workflow_config(name)
+
+        # Process input data and generate workflow ID (common logic)
+        processed_input, workflow_id = self._process_input_data(name, input_data)
+
+        # Create retry policy and timeout (common logic)
+        retry_policy = RetryPolicy(maximum_attempts=config['retry_count'])
+        run_timeout = self.parse_timeout_to_timedelta(config['timeout_str'])
+
+        print(
+            f"WorkflowClient - starting {'DMv2 ' if config['is_dmv2'] else ''}workflow: {name} with retry policy: {retry_policy} and timeout: {run_timeout}")
+
+        # Start workflow with appropriate args
+        workflow_args = self._build_workflow_args(name, processed_input, config['is_dmv2'])
+
+        workflow_handle = await self.temporal_client.start_workflow(
+            "ScriptWorkflow",
+            args=workflow_args,
+            id=workflow_id,
+            task_queue="python-script-queue",
+            id_conflict_policy=WorkflowIDConflictPolicy.FAIL,
+            id_reuse_policy=WorkflowIDReusePolicy.ALLOW_DUPLICATE,
+            retry_policy=retry_policy,
+            run_timeout=run_timeout
+        )
+
+        return workflow_id, workflow_handle.result_run_id
+
+    def _get_workflow_config(self, name: str) -> Dict[str, Any]:
+        """Extract workflow configuration from DMv2 or legacy config."""
+        from moose_lib.dmv2 import get_workflow
+
+        dmv2_workflow = get_workflow(name)
+        if dmv2_workflow is not None:
+            return {
+                'retry_count': dmv2_workflow.config.retries or 3,
+                'timeout_str': dmv2_workflow.config.timeout or "1h",
+                'is_dmv2': True
+            }
+        else:
+            config = self.configs.get(name, {})
+            return {
+                'retry_count': config.get('retries', 3),
+                'timeout_str': config.get('timeout', "1h"),
+                'is_dmv2': False
+            }
+
+    def _process_input_data(self, name: str, input_data: Any) -> tuple[Any, str]:
+        """Process input data and generate workflow ID."""
+        workflow_id = name
+        if input_data:
+            try:
+                # Handle Pydantic model input for DMv2
+                if isinstance(input_data, BaseModel):
+                    input_data = input_data.model_dump()
+                elif isinstance(input_data, str):
+                    input_data = json.loads(input_data)
+
+                # Encode with custom encoder
+                input_data = json.loads(
+                    json.dumps({"data": input_data}, cls=EnhancedJSONEncoder)
+                )
+
+                params_str = json.dumps(input_data, sort_keys=True)
+                params_hash = hashlib.sha256(params_str.encode()).hexdigest()[:16]
+                workflow_id = f"{name}-{params_hash}"
+            except Exception as e:
+                raise ValueError(f"Invalid input data: {e}")
+
+        return input_data, workflow_id
+
+    def _build_workflow_args(self, name: str, input_data: Any, is_dmv2: bool) -> list:
+        """Build workflow arguments based on workflow type."""
+        if is_dmv2:
+            return [f"{name}", input_data]
+        else:
+            return [f"{os.getcwd()}/app/scripts/{name}", input_data]
+
+    def load_consolidated_configs(self):
+        try:
+            file_path = os.path.join(os.getcwd(), ".moose", "workflow_configs.json")
+            with open(file_path, 'r') as file:
+                data = json.load(file)
+                config_map = {config['name']: config for config in data}
+                return config_map
+        except Exception as e:
+            raise ValueError(f"Error loading file {file_path}: {e}")
+
+    def parse_timeout_to_timedelta(self, timeout_str: str) -> timedelta:
+        if timeout_str.endswith('h'):
+            return timedelta(hours=int(timeout_str[:-1]))
+        elif timeout_str.endswith('m'):
+            return timedelta(minutes=int(timeout_str[:-1]))
+        elif timeout_str.endswith('s'):
+            return timedelta(seconds=int(timeout_str[:-1]))
+        else:
+            raise ValueError(f"Unsupported timeout format: {timeout_str}")


 class MooseClient:
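QueryClient now accepts either a ClickHouse client or a RuntimeClickHouseConfig (building its own client via clickhouse_connect.get_client), and execute() rewrites {name} placeholders into ClickHouse server-side bound parameters. Below is a standalone sketch of that placeholder rewrite, reproduced outside the class so it runs without a ClickHouse connection; the table and column names are made up:

# Standalone sketch of the placeholder rewrite done inside QueryClient.execute;
# mirrors the logic in the hunk above, no database connection needed.
from string import Formatter

query = "SELECT * FROM events WHERE user_id = {user_id} AND score > {min_score}"
variables = {"user_id": "abc", "min_score": 0.5}

params, values = {}, {}
for i, (_, variable_name, _, _) in enumerate(Formatter().parse(query)):
    if variable_name:
        value = variables[variable_name]
        t = 'String' if isinstance(value, str) else \
            'Int64' if isinstance(value, int) else \
            'Float64' if isinstance(value, float) else "String"
        params[variable_name] = f'{{p{i}: {t}}}'  # ClickHouse server-side binding syntax
        values[f'p{i}'] = value

clickhouse_query = query.format_map(params)
print(clickhouse_query)
# SELECT * FROM events WHERE user_id = {p0: String} AND score > {p1: Float64}
print(values)
# {'p0': 'abc', 'p1': 0.5}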
@@ -182,13 +357,29 @@ class MooseClient:
         query (QueryClient): Client for executing queries.
         workflow (Optional[WorkflowClient]): Client for workflow operations (if configured).
     """
+
     def __init__(self, ch_client: ClickhouseClient, temporal_client: Optional[TemporalClient] = None):
         self.query = QueryClient(ch_client)
+        self.temporal_client = temporal_client
         if temporal_client:
             self.workflow = WorkflowClient(temporal_client)
         else:
             self.workflow = None

+    async def cleanup(self):
+        """Cleanup resources before shutdown"""
+        if self.query:
+            try:
+                self.query.close()
+            except Exception as e:
+                print(f"Error closing Clickhouse client: {e}")
+
+        if self.temporal_client:
+            try:
+                await self.temporal_client.close()
+            except Exception as e:
+                print(f"Error closing Temporal client: {e}")
+

 class Sql:
     """Represents a SQL query template with embedded values.
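MooseClient now keeps a reference to the Temporal client and exposes an async cleanup() that closes both connections. A shutdown sketch, assuming moose_lib 0.4.263 is installed and a local ClickHouse is reachable over HTTP; the host and credentials below are placeholders, not values shipped with the package:

# Shutdown sketch for the new MooseClient.cleanup() hook.
import asyncio
from clickhouse_connect import get_client
from moose_lib.main import MooseClient

# Placeholder connection details for a local ClickHouse instance.
ch_client = get_client(host="localhost", port=8123, username="default", password="")
client = MooseClient(ch_client)  # no Temporal client, so client.workflow is None

try:
    rows = client.query.execute("SELECT 1 AS one", {})
    print(rows)
finally:
    # cleanup() is async because closing the Temporal client is awaited;
    # it tolerates either client being absent or already closed.
    asyncio.run(client.cleanup())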
@@ -211,6 +402,7 @@ class Sql:
         values (list[Any]): The flattened list of values corresponding to the gaps
             between the strings.
     """
+
     def __init__(self, raw_strings: list[str], raw_values: list['RawValue']):
         if len(raw_strings) - 1 != len(raw_values):
             if len(raw_strings) == 0:
@@ -251,4 +443,3 @@ def sigterm_handler():
     """Handles SIGTERM signals by printing a message and exiting gracefully."""
     print("SIGTERM received")
     sys.exit(0)
-
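Two details of the WorkflowClient added above are worth calling out: workflow IDs are derived from a short SHA-256 hash of the sorted, JSON-encoded input (so identical input reuses the same ID), and run timeouts are parsed from "Nh"/"Nm"/"Ns" strings. The sketch below mirrors that logic outside the class; the helper names and the "daily_etl" workflow are hypothetical, and the compact timeout variant raises KeyError rather than ValueError for unsupported units:

# Self-contained sketch of the workflow-ID and timeout logic from the diff above.
import hashlib
import json
from datetime import timedelta

def workflow_id_for(name: str, input_data: dict) -> str:
    wrapped = {"data": input_data}                     # same envelope as _process_input_data
    params_str = json.dumps(wrapped, sort_keys=True)   # sorted keys give a stable hash
    params_hash = hashlib.sha256(params_str.encode()).hexdigest()[:16]
    return f"{name}-{params_hash}"

def parse_timeout(timeout_str: str) -> timedelta:
    # Accepts the same "1h" / "30m" / "45s" strings as parse_timeout_to_timedelta.
    unit = {"h": "hours", "m": "minutes", "s": "seconds"}[timeout_str[-1]]
    return timedelta(**{unit: int(timeout_str[:-1])})

print(workflow_id_for("daily_etl", {"day": "2024-01-01"}))
# daily_etl-<16 hex chars>; identical input always yields the same ID
print(parse_timeout("1h"), parse_timeout("30m"))
# 1:00:00 0:30:00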
moose_lib/streaming/streaming_function_runner.py
CHANGED

@@ -30,6 +30,7 @@ from typing import Optional, Callable, Tuple, Any

 from moose_lib.dmv2 import get_streams, DeadLetterModel
 from moose_lib import cli_log, CliLogData, DeadLetterQueue
+from moose_lib.commons import EnhancedJSONEncoder

 # Force stdout to be unbuffered
 sys.stdout = io.TextIOWrapper(
@@ -82,34 +83,6 @@ class KafkaTopicConfig:
         return name


-class EnhancedJSONEncoder(json.JSONEncoder):
-    """
-    Custom JSON encoder that handles:
-    - datetime objects (converts to ISO format with timezone)
-    - dataclass instances (converts to dict)
-    - Pydantic models (converts to dict)
-    """
-
-    def default(self, o):
-        if isinstance(o, datetime):
-            if o.tzinfo is None:
-                o = o.replace(tzinfo=timezone.utc)
-            return o.isoformat()
-        if hasattr(o, "model_dump"):  # Handle Pydantic v2 models
-            # Convert to dict and handle datetime fields
-            data = o.model_dump()
-            # Handle any datetime fields that might be present
-            for key, value in data.items():
-                if isinstance(value, datetime):
-                    if value.tzinfo is None:
-                        value = value.replace(tzinfo=timezone.utc)
-                    data[key] = value.isoformat()
-            return data
-        if dataclasses.is_dataclass(o):
-            return dataclasses.asdict(o)
-        return super().default(o)
-
-
 def load_streaming_function_dmv1(function_file_dir: str, function_file_name: str) -> Tuple[type, Callable]:
     """
     Load a DMV1 (legacy) streaming function from a Python module.
{moose_lib-0.4.261.dist-info → moose_lib-0.4.263.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
-moose_lib/__init__.py,sha256=
+moose_lib/__init__.py,sha256=_XXfHrzRl-M5M1e3CwBQGGblVQYKnU5QJkipghe4bXU,184
 moose_lib/blocks.py,sha256=_wdvC2NC_Y3MMEnB71WTgWbeQ--zPNHk19xjToJW0C0,3185
-moose_lib/commons.py,sha256=
+moose_lib/commons.py,sha256=z63RPtdHFRMFvPDWIvOCYV6UHlib7si34RpD5VPa0Gs,3602
 moose_lib/data_models.py,sha256=FjFNd88WWCGMG5p5gV-Vjgp1cXARBeFapEHqDy0-WX8,9792
 moose_lib/dmv2-serializer.py,sha256=CL_Pvvg8tJOT8Qk6hywDNzY8MYGhMVdTOw8arZi3jng,49
 moose_lib/internal.py,sha256=ezqTTWS3T6nAPAxcjMPGYs-6ZwZwxOTZVuVFHiSkEmw,14269
-moose_lib/main.py,sha256=
+moose_lib/main.py,sha256=Fn_J7xppx9gR0d2NNoXSUH0TSpeIG7KQPgOBGGZBBQM,16434
 moose_lib/query_param.py,sha256=AB5BKu610Ji-h1iYGMBZKfnEFqt85rS94kzhDwhWJnc,6288
 moose_lib/tasks.py,sha256=6MXA0j7nhvQILAJVTQHCAsquwrSOi2zAevghAc_7kXs,1554
 moose_lib/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -26,12 +26,12 @@ moose_lib/dmv2/types.py,sha256=5FsB0HLHFkYB-8cjJ0rtRUjqahVA-ToLr2JXT1lFiss,3276
 moose_lib/dmv2/view.py,sha256=fVbfbJgc2lvhjpGvpfKcFUqZqxKuLD4X59jdupxIe94,1350
 moose_lib/dmv2/workflow.py,sha256=ZNEMaYWGCkOw_1qropfl2m553aq5YG16Y0rOJjo8eak,5916
 moose_lib/streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-moose_lib/streaming/streaming_function_runner.py,sha256=
+moose_lib/streaming/streaming_function_runner.py,sha256=l8oF3qJpYDxPLQFvoF8TrbCWI0WPV6lVLYwdw9Oqyg4,23378
 tests/__init__.py,sha256=0Gh4yzPkkC3TzBGKhenpMIxJcRhyrrCfxLSfpTZnPMQ,53
 tests/conftest.py,sha256=ZVJNbnr4DwbcqkTmePW6U01zAzE6QD0kNAEZjPG1f4s,169
 tests/test_moose.py,sha256=mBsx_OYWmL8ppDzL_7Bd7xR6qf_i3-pCIO3wm2iQNaA,2136
 tests/test_redis_client.py,sha256=d9_MLYsJ4ecVil_jPB2gW3Q5aWnavxmmjZg2uYI3LVo,3256
-moose_lib-0.4.
-moose_lib-0.4.
-moose_lib-0.4.
-moose_lib-0.4.
+moose_lib-0.4.263.dist-info/METADATA,sha256=1GngrV_nw0uSr4N81ws4Amy5lDgMvzI48uHoZa0dpfM,729
+moose_lib-0.4.263.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+moose_lib-0.4.263.dist-info/top_level.txt,sha256=XEns2-4aCmGp2XjJAeEH9TAUcGONLnSLy6ycT9FSJh8,16
+moose_lib-0.4.263.dist-info/RECORD,,
File without changes
|
|
File without changes
|