chalkpy 2.94.2__py3-none-any.whl → 2.94.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chalk/__init__.py +2 -1
- chalk/_version.py +1 -1
- chalk/features/inference.py +17 -8
- chalk/features/resolver.py +182 -0
- chalk/ml/__init__.py +2 -0
- chalk/sql/_internal/sql_source.py +35 -2
- chalk/utils/tracing.py +12 -1
- {chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/METADATA +1 -1
- {chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/RECORD +12 -12
- {chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/WHEEL +0 -0
- {chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/entry_points.txt +0 -0
- {chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/top_level.txt +0 -0
chalk/__init__.py
CHANGED
@@ -26,7 +26,7 @@ from chalk.features._document import Document
 from chalk.features._last import Last
 from chalk.features.filter import freeze_time
 from chalk.features.pseudofeatures import Distance, Now
-from chalk.features.resolver import OfflineResolver, OnlineResolver, Resolver, offline, online
+from chalk.features.resolver import OfflineResolver, OnlineResolver, Resolver, make_model_resolver, offline, online
 from chalk.features.tag import BranchId, EnvironmentId
 from chalk.features.underscore import _, __, underscore
 from chalk.importer import get_resolver
@@ -98,6 +98,7 @@ __all__ = (
     "has_many",
     "has_one",
     "is_primary",
+    "make_model_resolver",
     "make_sql_file_resolver",
     "offline",
     "online",
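
The only change here is the new top-level export. A minimal sketch of what it enables, assuming chalkpy 2.94.4 is installed: `make_model_resolver` is now importable from the package root as well as from `chalk.features.resolver`.

    # Sketch assuming chalkpy 2.94.4 is installed: both import paths in this diff
    # should point at the same function.
    from chalk import make_model_resolver
    from chalk.features.resolver import make_model_resolver as _impl

    assert make_model_resolver is _impl
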
chalk/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "2.94.2"
+__version__ = "2.94.4"

chalk/features/inference.py
CHANGED
@@ -12,7 +12,7 @@ from chalk.utils.collections import ensure_tuple


 def build_inference_function(
-    model_version: ModelVersion, pkey: Feature,
+    model_version: ModelVersion, pkey: Feature, output_features: Optional[Feature | list[Feature]] = None
 ) -> Callable[[DataFrame], DataFrame]:
     """Build the core inference function that takes a DataFrame and returns predictions.

@@ -24,8 +24,9 @@ def build_inference_function(
         The model version to use for prediction
     pkey
         The primary key feature to exclude from predictions
-
-        Optional output feature to add predictions to the DataFrame
+    output_features
+        Optional output feature(s) to add predictions to the DataFrame.
+        Can be a single Feature or a list of Features for multi-output models.

     Returns
     -------
@@ -45,11 +46,19 @@ def build_inference_function(
     # Run prediction
     result = model_version.predict(model_input)

-    if
-    #
-
-
-
+    if output_features is not None:
+        # Normalize to list for uniform processing
+        features_list = output_features if isinstance(output_features, list) else [output_features]
+
+        # Extract output for each feature and build columns dict
+        columns_dict = {}
+        for output_feature in features_list:
+            # Use model-specific output extraction (default: identity, ONNX: extract field)
+            output_feature_name = str(output_feature).split(".")[-1]
+            result_data = model_version.predictor.extract_output(result, output_feature_name)
+            columns_dict[output_feature] = result_data
+
+        return inp[pkey_string].with_columns(columns_dict)

     return result

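
The behavioral change in `build_inference_function` is the normalization of `output_features`: a single Feature and a list of Features now go through the same code path. The sketch below isolates just that normalization, with plain strings standing in for Chalk Feature objects; the helper name is illustrative and not part of chalkpy.

    # Illustrative helper mirroring the isinstance(..., list) normalization added above;
    # plain strings stand in for Chalk Feature objects.
    from typing import List, Optional, Union


    def normalize_outputs(output_features: Optional[Union[str, List[str]]]) -> List[str]:
        if output_features is None:
            return []
        return output_features if isinstance(output_features, list) else [output_features]


    assert normalize_outputs("user.risk_score") == ["user.risk_score"]
    assert normalize_outputs(["user.risk_score", "user.credit_score"]) == [
        "user.risk_score",
        "user.credit_score",
    ]
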
chalk/features/resolver.py
CHANGED
@@ -115,6 +115,7 @@ if TYPE_CHECKING:

     from chalk.features import Underscore
     from chalk.features.underscore import UnderscoreAttr, UnderscoreCall, UnderscoreCast, UnderscoreFunction
+    from chalk.ml.model_version import ModelVersion
     from chalk.sql import BaseSQLSourceProtocol, SQLSourceGroup
     from chalk.sql._internal.sql_settings import SQLResolverSettings
     from chalk.sql._internal.sql_source import BaseSQLSource
@@ -4478,3 +4479,184 @@ def validate_message_attributes(
         message_type = message_type.__args__[0]  # pyright: ignore[reportAttributeAccessIssue]
     for expression in expressions:
         validate_underscore_expression(expression, message_type, error_builder, name)
+
+
+def make_model_resolver(
+    name: str,
+    model: "ModelVersion",
+    inputs: Dict[Feature, str] | List[Feature],
+    output: Feature | List[Feature] | Dict[Feature, str],
+    feature_class: Optional[type[Features]] = None,
+    resource_group: Optional[str] = None,
+    resource_hint: Optional[ResourceHint] = None,
+) -> OnlineResolver:
+    """
+    Create an online resolver that runs inference on a model.
+
+    This function provides an imperative API for creating model inference resolvers,
+    as an alternative to using F.inference in feature definitions. It uses the same
+    underlying implementation as F.inference but allows you to create resolvers
+    programmatically.
+
+    Parameters
+    ----------
+    name
+        The name of the resolver
+    model
+        A ModelVersion reference to a deployed model
+    inputs
+        Either a dict mapping Feature objects to model input names (strings), or a list of
+        Feature objects. If a dict, the values represent the model's expected input names
+        (for future use). If a list, the features will be passed as a single DataFrame to
+        the model.
+    output
+        The output feature(s) that will contain the predictions.
+        Can be a single Feature, a list of Features, or a dict mapping Feature objects to
+        model output names (strings) for future use with multi-output models.
+    feature_class
+        Optional feature class to use. If not provided, will be inferred from the inputs.
+    resource_group
+        Optional resource group for the resolver
+    resource_hint
+        Optional resource hint for execution (e.g., CPU/GPU preferences)
+
+    Returns
+    -------
+    OnlineResolver
+        The created resolver
+
+    Examples
+    --------
+    >>> from chalk.features import features, feature
+    >>> from chalk.features.resolver import make_model_resolver
+    >>> from chalk.ml import ModelVersion
+    >>>
+    >>> @features
+    ... class User:
+    ...     id: str = feature(primary=True)
+    ...     age: float
+    ...     income: float
+    ...     risk_score: float
+    ...     credit_score: float
+    >>>
+    >>> # Create a model version reference
+    >>> model = ModelVersion(
+    ...     name="risk_model",
+    ...     version=1,
+    ...     model_type="sklearn",
+    ...     model_encoding="pickle",
+    ...     filename="model.pkl"
+    ... )
+    >>>
+    >>> # Create resolver with single output
+    >>> resolver = make_model_resolver(
+    ...     name="risk_model",
+    ...     model=model,
+    ...     inputs=[User.age, User.income],
+    ...     output=User.risk_score,
+    ... )
+    >>>
+    >>> # Create resolver with multiple outputs (list)
+    >>> resolver = make_model_resolver(
+    ...     name="multi_output_model",
+    ...     model=model,
+    ...     inputs=[User.age, User.income],
+    ...     output=[User.risk_score, User.credit_score],
+    ... )
+    >>>
+    >>> # Create resolver with named inputs and outputs (dict)
+    >>> resolver = make_model_resolver(
+    ...     name="named_model",
+    ...     model=model,
+    ...     inputs={User.age: "age_input", User.income: "income_input"},
+    ...     output={User.risk_score: "risk_output", User.credit_score: "credit_output"},
+    ... )
+    """
+    from chalk.features.inference import build_inference_function
+
+    if isinstance(inputs, dict):
+        input_features_raw = list(inputs.keys())
+    else:
+        input_features_raw = inputs
+
+    input_features = [unwrap_feature(f) for f in input_features_raw]
+
+    if isinstance(output, dict):
+        output_features = [unwrap_feature(f) for f in output.keys()]
+    elif isinstance(output, list):
+        output_features = [unwrap_feature(f) for f in output]
+    else:
+        output_features = [unwrap_feature(output)]
+
+    # If feature_class is not provided, try to infer it from the first input feature
+    if feature_class is None:
+        if not input_features:
+            raise ValueError("Cannot infer feature class: no input features provided and feature_class not specified")
+
+        first_input = input_features[0]
+
+        if hasattr(first_input, "features_cls") and first_input.features_cls is not None:
+            feature_class = first_input.features_cls
+        else:
+            raise ValueError(
+                "Cannot infer feature class from inputs. Please provide feature_class parameter explicitly."
+            )
+
+    pkey = feature_class.__chalk_primary__
+    if pkey is None:
+        raise ValueError(f"Feature class {feature_class} does not have a primary key defined")
+
+    first_output = output_features[0]
+
+    output_namespace = (
+        first_output.namespace
+        if hasattr(first_output, "namespace") and first_output.namespace
+        else feature_class.__name__.lower()
+    )
+
+    # Use the same underlying inference function as F.inference
+    # Pass list of outputs if multiple, single if only one
+    output_for_inference = output_features if len(output_features) > 1 else output_features[0]
+    inference_fn = build_inference_function(model, pkey, output_for_inference)
+
+    if len(output_features) == 1:
+        output_names = output_features[0].name
+    else:
+        output_names = "_".join(f.name for f in output_features)
+
+    resolver = OnlineResolver(
+        function_definition="",
+        filename="",
+        fqn=f"{name}__{output_namespace}_{output_names}",
+        doc=None,
+        inputs=[DataFrame[[pkey, *ensure_tuple(input_features)]]],
+        state=None,
+        output=Features[DataFrame[tuple([*output_features, pkey])]],  # type: ignore[misc]
+        fn=inference_fn,
+        environment=None,
+        machine_type=None,
+        default_args=[None],
+        timeout=None,
+        cron=None,
+        when=None,
+        tags=None,
+        owner=None,
+        resource_hint=resource_hint or model.resource_hint,
+        data_sources=None,
+        is_sql_file_resolver=False,
+        source_line=None,
+        lsp_builder=get_resolver_error_builder(inference_fn),
+        parse=None,
+        static=False,
+        total=False,
+        autogenerated=False,
+        unique_on=None,
+        partitioned_by=None,
+        data_lineage=None,
+        sql_settings=None,
+    )
+
+    # Register the resolver
+    RESOLVER_REGISTRY.add_to_registry(resolver, override=False)
+
+    return resolver
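
One detail worth noting from the new function body: the resolver's fqn is built from the resolver name, the output namespace, and the joined output feature names, and the resolver is registered immediately via RESOLVER_REGISTRY.add_to_registry(..., override=False). A small sketch of the fqn template follows; the namespace "user" is an assumption for the User class in the docstring example.

    # Sketch of the fqn template from the diff (f"{name}__{output_namespace}_{output_names}");
    # "user" is an assumed namespace for the User feature class in the docstring example.
    name = "multi_output_model"
    output_namespace = "user"
    output_names = "_".join(["risk_score", "credit_score"])

    fqn = f"{name}__{output_namespace}_{output_names}"
    assert fqn == "multi_output_model__user_risk_score_credit_score"
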
chalk/ml/__init__.py
CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations

 from chalk.ml.model_file_transfer import FileInfo, HFSourceConfig, LocalSourceConfig, S3SourceConfig, SourceConfig
 from chalk.ml.model_reference import ModelReference
+from chalk.ml.model_version import ModelVersion
 from chalk.ml.utils import ModelClass, ModelEncoding, ModelRunCriterion, ModelType

 __all__ = (
@@ -9,6 +10,7 @@ __all__ = (
     "ModelClass",
     "ModelEncoding",
     "ModelReference",
+    "ModelVersion",
     "SourceConfig",
     "LocalSourceConfig",
     "S3SourceConfig",

chalk/sql/_internal/sql_source.py
CHANGED
@@ -214,11 +214,32 @@ class BaseSQLSource(BaseSQLSourceProtocol):
         if getattr(self, "kind", None) != SQLSourceKind.trino:
             engine_args.setdefault("pool_pre_ping", env_var_bool("USE_CLIENT_POOL_PRE_PING"))
             async_engine_args.setdefault("pool_pre_ping", env_var_bool("USE_CLIENT_POOL_PRE_PING"))
-
-        self.
+        # Store raw args internally, expose filtered versions via properties
+        self._raw_engine_args = engine_args
+        self._raw_async_engine_args = async_engine_args
         self._engine = None
         self._async_engine = None

+    @property
+    def engine_args(self) -> Dict[str, Any]:
+        """Engine arguments with native_args filtered out for SQLAlchemy."""
+        return {k: v for k, v in self._raw_engine_args.items() if k != "native_args"}
+
+    @engine_args.setter
+    def engine_args(self, args: dict[str, Any]):
+        """Set raw engine args (for backward compatibility)."""
+        self._raw_engine_args = args
+
+    @property
+    def async_engine_args(self) -> Dict[str, Any]:
+        """Async engine arguments with native_args filtered out for SQLAlchemy."""
+        return {k: v for k, v in self._raw_async_engine_args.items() if k != "native_args"}
+
+    @async_engine_args.setter
+    def async_engine_args(self, args: dict[str, Any]):
+        """Set raw async engine args (for backward compatibility)."""
+        self._raw_async_engine_args = args
+
     @property
     def _engine_args(self):
         """Backcompat support for private subclassing of BaseSQLSource"""
@@ -239,6 +260,16 @@ class BaseSQLSource(BaseSQLSourceProtocol):
         """Backcompat support for private subclassing of BaseSQLSource"""
         self.async_engine_args = args

+    @property
+    def native_args(self) -> Dict[str, Any]:
+        """Native arguments to be passed to the underlying database driver.
+
+        These arguments are extracted from engine_args and async_engine_args
+        and are not passed to SQLAlchemy's create_engine or create_async_engine.
+        Instead, they should be used by subclasses to configure native driver connections.
+        """
+        return self._raw_engine_args.get("native_args", {})
+
     def get_sqlglot_dialect(self) -> Union[str, None]:
         """Returns the name of the SQL dialect (if it has one) for `sqlglot` to parse the SQL string.
         This allows for use of dialect-specific syntax while parsing and modifying queries."""
@@ -832,6 +863,7 @@ class BaseSQLSource(BaseSQLSourceProtocol):
         if self._engine is None:
             self.register_sqlalchemy_compiler_overrides()
             self._check_engine_isolation_level()
+            # engine_args property already filters out native_args
             self._engine = create_engine(url=self.local_engine_url(), **self.engine_args)
         return self._engine

@@ -841,6 +873,7 @@ class BaseSQLSource(BaseSQLSourceProtocol):
         if self._async_engine is None:
             self.register_sqlalchemy_compiler_overrides()
             self._check_engine_isolation_level()
+            # async_engine_args property already filters out native_args
             self._async_engine = create_async_engine(url=self.async_local_engine_url(), **self.async_engine_args)
         return self._async_engine
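
The key behavior here is that a `native_args` entry placed in `engine_args` is no longer forwarded to SQLAlchemy: the new properties filter it out and expose it separately as `native_args`. The sketch below reproduces only the filtering logic from the diff with a hypothetical arguments dict, without constructing a real data source.

    # Standalone reproduction of the filtering added in this diff; the example
    # engine_args dict (and its "session_parameters" payload) is hypothetical.
    raw_engine_args = {
        "pool_pre_ping": True,
        "native_args": {"session_parameters": {"QUERY_TAG": "chalk"}},
    }

    # What the engine_args property would return (passed to create_engine):
    sqlalchemy_kwargs = {k: v for k, v in raw_engine_args.items() if k != "native_args"}
    # What the native_args property would return (for the native driver path):
    native_args = raw_engine_args.get("native_args", {})

    assert "native_args" not in sqlalchemy_kwargs
    assert native_args == {"session_parameters": {"QUERY_TAG": "chalk"}}
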
chalk/utils/tracing.py
CHANGED
@@ -10,16 +10,21 @@ from typing import TYPE_CHECKING, Any, Mapping, Union, cast
 from chalk.utils._ddtrace_version import can_use_datadog_statsd, can_use_ddtrace
 from chalk.utils._otel_version import can_use_otel_trace
 from chalk.utils.environment_parsing import env_var_bool
+from chalk.utils.log_with_context import get_logger

 if TYPE_CHECKING:
     import ddtrace.context
     from opentelemetry import trace as otel_trace

+_logger = get_logger(__name__)
+
 if can_use_otel_trace:
     from opentelemetry import context as otel_context
     from opentelemetry import trace as otel_trace
     from opentelemetry.propagate import inject as otel_inject

+    _logger.debug("OTEL trace packages installed, otel tracing is available")
+
     @contextlib.contextmanager
     def safe_trace(span_id: str, attributes: Mapping[str, str] | None = None):  # pyright: ignore[reportRedeclaration]
         if attributes is None:
@@ -73,10 +78,12 @@ if can_use_otel_trace:
         otel_inject(headers, context=ctx)
         return headers

-elif can_use_ddtrace
+elif can_use_ddtrace:
     import ddtrace
     from ddtrace.propagation.http import HTTPPropagator

+    _logger.debug("ddtrace installed and available, using it to trace")
+
     @contextlib.contextmanager
     def safe_trace(span_id: str, attributes: Mapping[str, str] | None = None):  # pyright: ignore[reportRedeclaration]
         if not ddtrace.tracer.enabled:
@@ -134,6 +141,7 @@ elif can_use_ddtrace and can_use_datadog_statsd:
         return headers

 else:
+    _logger.debug("no trace packages found, tracing will not work")

     @contextlib.contextmanager
     def safe_trace(span_id: str, attributes: Mapping[str, str] | None = None):  # pyright: ignore[reportRedeclaration]
@@ -283,3 +291,6 @@ def configure_tracing(default_service_name: str):
         )
     else:
         _logger.warning("neither opentelemetry nor ddtrace are installed")
+
+
+configure_tracing("chalkpy")
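
Two things change the import-time behavior of this module: a module-level logger now records which tracing backend was detected, and `configure_tracing("chalkpy")` is called when the module is imported. A minimal way to observe this, assuming chalkpy 2.94.4 is installed and a debug-level root logger:

    # Sketch assuming chalkpy 2.94.4: importing the module now calls
    # configure_tracing("chalkpy") and logs which backend (OpenTelemetry, ddtrace,
    # or none) was found.
    import logging

    logging.basicConfig(level=logging.DEBUG)
    import chalk.utils.tracing  # noqa: E402,F401
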
{chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-chalk/__init__.py,sha256=
-chalk/_version.py,sha256=
+chalk/__init__.py,sha256=vKsx9-cl5kImlVWGHVRYO6bweBm79NAzGs3l36u71wM,2657
+chalk/_version.py,sha256=luYLQQ9SuJMxCYjx5Y4oiVD-k1_vNu6P83uTjbtFHlI,23
 chalk/cli.py,sha256=ckqqfOI-A2mT23-rnZzDMmblYj-2x1VBX8ebHlIEn9A,5873
 chalk/importer.py,sha256=m4lMn1lSYj_euDq8CS7LYTBnek9JOcjGJf9-82dJHbA,64441
 chalk/prompts.py,sha256=2H9UomLAamdfRTNUdKs9i3VTpiossuyRhntqsAXUhhg,16117
@@ -631,12 +631,12 @@ chalk/features/feature_time.py,sha256=iUk8NDelig81jP7QT3tguyzx5eOZ-YC84OVgJRRKVw
 chalk/features/feature_wrapper.py,sha256=OolNWGGX67IAEMHCObFvOCpH5EmwjbMvMygRSBJJtu0,19259
 chalk/features/filter.py,sha256=2ldMbqvXC-nJ0jc-OZ36qHtrej-Jkx4TNQ1W_NZodAs,11177
 chalk/features/hooks.py,sha256=KPJnudnhosEH7UptQT8W8sumIYPcZVMVjaJJJhPsOyk,5489
-chalk/features/inference.py,sha256=
+chalk/features/inference.py,sha256=D1mieSj9AvVZ47tsf_gaipw_y4QnBVD6XU1AJedwTo0,5377
 chalk/features/live_updates.py,sha256=8ZbiDjcLqfFruSL15_aycwzSqJ0TbKNhas06KfZLyLU,3644
 chalk/features/namespace_context.py,sha256=fL-nPohqtNiyPDS1uQTAaHLns4aivuBL2Flf50DajU4,1813
 chalk/features/primary.py,sha256=BZ8mrMmKfRNy_wnKGDJt2cdnejP_CZb6xBsD9Ljgajc,5209
 chalk/features/pseudofeatures.py,sha256=50Pe_Xi8ttYWtgNNRpgkhBxP8xoCZCYwyLb0aWUQ-PI,2147
-chalk/features/resolver.py,sha256=
+chalk/features/resolver.py,sha256=B_iIP3wm-nfYCkW1ZuxFMH2rWZogwaYshrLSZqLR0oQ,188553
 chalk/features/tag.py,sha256=LRmKRA8ANCOvmaIAk-L5j1QW2U0aah2SeASy8Uydkmk,2675
 chalk/features/underscore.py,sha256=4xnfQV3bfvVn0PNEtkT4J-k7hW4ebtH9KBe4_BvGjY4,26763
 chalk/features/underscore_features.py,sha256=PlVCoaDDffOgtiSMaxPSWCoj8IjscbkOzDLA471HsJ4,13005
@@ -678,7 +678,7 @@ chalk/integrations/catalogs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 chalk/integrations/catalogs/base_catalog.py,sha256=CwfZZWRvLHJtbIdeRWtBvuXjbtKjUuOWaGxTfvVMlgU,237
 chalk/integrations/catalogs/glue_catalog.py,sha256=V0FzB_sy8espMAijnQpmTs-VSmmTLbTd_Ijn08RKrn4,5635
 chalk/logging/__init__.py,sha256=CpWJAO83W8kOwihQSqPEBTOCCJBJ7jo2JsHPr-JDjPc,746
-chalk/ml/__init__.py,sha256=
+chalk/ml/__init__.py,sha256=B7QkTqFBhNlHTTvLK5xa8sWA_UcF386S3V91TfQXMU4,577
 chalk/ml/chalk_train.py,sha256=B-Kr1_vKgaeUk8kIk7-8YYCDoWjYzQvgbAQHkkb8JCI,2335
 chalk/ml/model_file_transfer.py,sha256=tpD3tg1PI20bNSLwyE63wzFU4qeG4rLFt1sL0JRkzMs,9587
 chalk/ml/model_hooks.py,sha256=vAd8iqW5zarEl2nLAdhoPYuJxxThx2_UuZ4orK9b9kI,14927
@@ -729,7 +729,7 @@ chalk/sql/_internal/query_execution_parameters.py,sha256=FT0GixOryGeKR1x7UrQNoGF
 chalk/sql/_internal/query_registry.py,sha256=jOk2x9NvW2pZocI_iTvgd9DyOlr4-ajz8rTi0kkLSaI,3005
 chalk/sql/_internal/sql_file_resolver.py,sha256=BCGTpe3tVLQTWNSrfyUFsg8VfO-pupeN8yxGpiSmy5E,77817
 chalk/sql/_internal/sql_settings.py,sha256=9lcpHNrmEhr1Zxl_Ct7U0p0AbLUvlpSayxHEPmyqu8E,543
-chalk/sql/_internal/sql_source.py,sha256=
+chalk/sql/_internal/sql_source.py,sha256=083tGM7YzSsL-AhwTAtCzvGSjKxGVH_R5pILEpe5g1c,43825
 chalk/sql/_internal/sql_source_group.py,sha256=RIMEjEG3oSGwign37w9avpCRdVfVQepR8SPVv6JoOx0,5562
 chalk/sql/_internal/string_chalk_query.py,sha256=mfPoyP_-nUx8B_RvqJtIMoI0orGPknW7QYEHOb_jQII,4342
 chalk/sql/_internal/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -799,12 +799,12 @@ chalk/utils/storage_client.py,sha256=cK5KH8DVAt4Okk3X4jNMCkMiZgfUJE9Sq3zn4HkaBQo
 chalk/utils/string.py,sha256=mHciu1FR1NdXiE0GjiCOOs_Q3JBVpaNnjUQPorE5cJg,4268
 chalk/utils/stubgen.py,sha256=-mKIWFeiZojtfPwaTd9o3h4m4RvTmMTk6i-bI9JpU6c,21580
 chalk/utils/threading.py,sha256=dacvfFCpDs9GDWdRrE2mmM3Ex5DKOIaj5rCYDTqGshk,5305
-chalk/utils/tracing.py,sha256=
+chalk/utils/tracing.py,sha256=ye5z6UCEsrxXC3ofXUNCDdUCf8ydPahEO92qQTd0AIA,11383
 chalk/utils/weak_set_by_identity.py,sha256=VmikA_laYwFeOphCwXJIuyOIkrdlQe0bSzaXq7onoQw,953
 chalk/utils/pydanticutil/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 chalk/utils/pydanticutil/pydantic_compat.py,sha256=O575lLYJ5GvZC4HMzR9yATxf9XwjC6NrDUXbNwZidlE,3031
-chalkpy-2.94.
-chalkpy-2.94.
-chalkpy-2.94.
-chalkpy-2.94.
-chalkpy-2.94.
+chalkpy-2.94.4.dist-info/METADATA,sha256=H4Au5kWLocgZWNgy9_jBffgCfWQPqJZvdNBVWBaZHys,27494
+chalkpy-2.94.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+chalkpy-2.94.4.dist-info/entry_points.txt,sha256=Vg23sd8icwq-morJrljVFr-kQnMbm95rZfZj5wsZGis,42
+chalkpy-2.94.4.dist-info/top_level.txt,sha256=1Q6_19IGYfNxSw50W8tYKEJ2t5HKQ3W9Wiw4ia5yg2c,6
+chalkpy-2.94.4.dist-info/RECORD,,

{chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/WHEEL
File without changes

{chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/entry_points.txt
File without changes

{chalkpy-2.94.2.dist-info → chalkpy-2.94.4.dist-info}/top_level.txt
File without changes