liesel_gam-0.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
liesel_gam/__about__.py ADDED
@@ -0,0 +1 @@
+ __version__ = "0.0.4"
liesel_gam/__init__.py ADDED
@@ -0,0 +1,9 @@
+ from .__about__ import __version__ as __version__
+ from .dist import MultivariateNormalSingular as MultivariateNormalSingular
+ from .kernel import init_star_ig_gibbs as init_star_ig_gibbs
+ from .kernel import star_ig_gibbs as star_ig_gibbs
+ from .predictor import AdditivePredictor as AdditivePredictor
+ from .var import Basis as Basis
+ from .var import Intercept as Intercept
+ from .var import LinearTerm as LinearTerm
+ from .var import SmoothTerm as SmoothTerm
liesel_gam/dist.py ADDED
@@ -0,0 +1,100 @@
+ from functools import cached_property
+
+ import jax
+ import jax.numpy as jnp
+ import tensorflow_probability.substrates.jax.distributions as tfd
+ from tensorflow_probability.substrates.jax import tf2jax as tf
+ from tensorflow_probability.substrates.jax.internal.parameter_properties import (
+     ParameterProperties,
+ )
+
+ Array = jax.typing.ArrayLike
+
+
+ class MultivariateNormalSingular(tfd.Distribution):
+     def __init__(
+         self,
+         loc: Array,
+         scale: Array,
+         penalty: Array,
+         penalty_rank: Array,
+         validate_args: bool = False,
+         allow_nan_stats: bool = True,
+         name: str = "MultivariateNormalSingular",
+     ):
+         parameters = dict(locals())
+
+         self._loc = jnp.asarray(loc)
+         self._scale = jnp.asarray(scale)
+         self._penalty = jnp.asarray(penalty)
+         self._penalty_rank = jnp.asarray(penalty_rank)
+
+         super().__init__(
+             dtype=self._loc.dtype,
+             reparameterization_type=tfd.FULLY_REPARAMETERIZED,
+             validate_args=validate_args,
+             allow_nan_stats=allow_nan_stats,
+             parameters=parameters,
+             name=name,
+         )
+
+     @classmethod
+     def _parameter_properties(cls, dtype=jnp.float32, num_classes=None):
+         return dict(
+             loc=ParameterProperties(event_ndims=1),
+             scale=ParameterProperties(event_ndims=0),
+             penalty=ParameterProperties(event_ndims=2),
+             penalty_rank=ParameterProperties(event_ndims=0),
+         )
+
+     def _event_shape(self):
+         return tf.TensorShape((jnp.shape(self._penalty)[-1],))
+
+     def _event_shape_tensor(self):
+         return jnp.array((jnp.shape(self._penalty)[-1],), dtype=self._penalty.dtype)
+
+     def _log_prob(self, x: Array) -> Array:
+         x_centered = x - self._loc
+
+         # The following lines illustrate what the jnp.einsum call is conceptually
+         # doing.
+         # xt = jnp.expand_dims(x_centered, axis=-2)  # [batch_dims, 1, event_dim]
+         # xc = jnp.swapaxes(xt, -2, -1)              # [batch_dims, event_dim, 1]
+         # quad_form = jnp.squeeze(xt @ self._penalty @ xc)
+         quad_form = jnp.einsum(
+             "...i,...ij,...j->...", x_centered, self._penalty, x_centered
+         )
+
+         neg_kernel = 0.5 * quad_form * jnp.power(self._scale, -2.0)
+
+         return -(jnp.log(self._scale) * self._penalty_rank + neg_kernel)
+
+     def _sample_n(self, n, seed=None) -> Array:
+         shape = [n] + self.batch_shape + self.event_shape
+
+         # The added dimension at the end here makes sure that matrix multiplication
+         # with the "sqrt pcov" matrices works out correctly.
+         z = jax.random.normal(key=seed, shape=shape + [1])
+
+         # Add a dimension at 0 for the sample size.
+         sqrt_cov = jnp.expand_dims(self._sqrt_cov, 0)
+         centered_samples = jnp.reshape(sqrt_cov @ z, shape)
+
+         # Add a dimension at 0 for the sample size.
+         loc = jnp.expand_dims(self._loc, 0)
+         scale = jnp.expand_dims(self._scale, 0)
+
+         return scale * centered_samples + loc
+
+     @cached_property
+     def _sqrt_cov(self) -> Array:
+         eigenvalues, evecs = jnp.linalg.eigh(self._penalty)
+         sqrt_eval = jnp.sqrt(1 / eigenvalues)
+         sqrt_eval = sqrt_eval.at[: -self._penalty_rank].set(0.0)
+
+         event_shape = sqrt_eval.shape[-1]
+         shape = sqrt_eval.shape + (event_shape,)
+
+         r = tuple(range(event_shape))
+         diags = jnp.zeros(shape).at[..., r, r].set(sqrt_eval)
+         return evecs @ diags
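
For orientation, `_log_prob` evaluates the unnormalized log-density of a partially improper Gaussian: for a penalty `K` with rank `r` and scale `s`, it returns `-r * log(s) - (x - loc)' K (x - loc) / (2 * s**2)`. In `_sqrt_cov`, `jnp.linalg.eigh` returns eigenvalues in ascending order, so zeroing the leading `dim - r` entries removes the null space of the penalty. A minimal, untested usage sketch (the random-walk penalty is an illustrative choice, not part of the package):

```python
import jax.numpy as jnp

from liesel_gam import MultivariateNormalSingular

# Illustrative second-order random-walk penalty for 5 coefficients.
D = jnp.diff(jnp.eye(5), n=2, axis=0)  # second-difference matrix, shape (3, 5)
K = D.T @ D  # penalty matrix with rank 3

dist = MultivariateNormalSingular(
    loc=jnp.zeros(5),
    scale=1.0,
    penalty=K,
    penalty_rank=jnp.linalg.matrix_rank(K),
)

beta = jnp.array([0.1, -0.2, 0.3, 0.0, -0.1])
# With scale=1.0 this reduces to -0.5 * beta @ K @ beta.
print(dist.log_prob(beta))
```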
liesel_gam/kernel.py ADDED
@@ -0,0 +1,48 @@
+ from collections.abc import Sequence
+
+ import jax
+ import jax.numpy as jnp
+ import liesel.goose as gs
+ import liesel.model as lsl
+
+
+ def star_ig_gibbs(coef: lsl.Var) -> gs.GibbsKernel:
+     variance_var = coef.dist_node["scale"].value_node[0]  # type: ignore
+     a_value = variance_var.dist_node["concentration"].value  # type: ignore
+     b_value = variance_var.dist_node["scale"].value  # type: ignore
+
+     penalty_value = coef.dist_node["penalty"].value  # type: ignore
+     rank_value = jnp.linalg.matrix_rank(penalty_value)
+
+     model = coef.model
+     if model is None:
+         raise ValueError("The model must be set in the coefficient variable.")
+
+     name = variance_var.name
+
+     def transition(prng_key, model_state):
+         pos = model.extract_position([coef.name], model_state)
+
+         coef_value = pos[coef.name].squeeze()
+
+         a_gibbs = jnp.squeeze(a_value + 0.5 * rank_value)
+         b_gibbs = jnp.squeeze(
+             b_value + 0.5 * (coef_value @ penalty_value @ coef_value)
+         )
+
+         draw = b_gibbs / jax.random.gamma(prng_key, a_gibbs)
+
+         return {name: draw}
+
+     return gs.GibbsKernel([name], transition)
+
+
+ def init_star_ig_gibbs(position_keys: Sequence[str], coef: lsl.Var) -> gs.GibbsKernel:
+     if len(position_keys) != 1:
+         raise ValueError("position_keys must contain exactly one key.")
+
+     variance_var = coef.dist_node["scale"].value_node[0]  # type: ignore
+     name = variance_var.name
+
+     if position_keys[0] != name:
+         raise ValueError(f"The position key must be {name}.")
+
+     return star_ig_gibbs(coef)  # type: ignore
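
For reference, the transition function implements the standard conjugate update: with the prior `tau² ~ InverseGamma(a, b)` and the coefficient prior `beta | tau² ~ N(0, tau² K⁻)` for a penalty `K` of rank `r`, the full conditional is `tau² | beta ~ InverseGamma(a + r/2, b + beta' K beta / 2)`. The draw `b_gibbs / Gamma(a_gibbs, 1)` uses the fact that if `G ~ Gamma(a, 1)`, then `b / G ~ InverseGamma(a, b)`.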
liesel_gam/predictor.py ADDED
@@ -0,0 +1,48 @@
+ from __future__ import annotations
+
+ from collections.abc import Callable
+ from typing import Any, Self, cast
+
+ import liesel.model as lsl
+
+ Array = Any
+
+
+ class AdditivePredictor(lsl.Var):
+     def __init__(
+         self, name: str, inv_link: Callable[[Array], Array] | None = None
+     ) -> None:
+         if inv_link is None:
+
+             def _sum(*args, **kwargs):
+                 # the + 0. implicitly ensures correct dtype also for empty predictors
+                 return sum(args) + sum(kwargs.values()) + 0.0
+         else:
+
+             def _sum(*args, **kwargs):
+                 # the + 0. implicitly ensures correct dtype also for empty predictors
+                 return inv_link(sum(args) + sum(kwargs.values()) + 0.0)
+
+         super().__init__(lsl.Calc(_sum), name=name)
+         self.update()
+         self.terms: dict[str, lsl.Var] = {}
+         """Dictionary of terms in this predictor."""
+
+     def update(self) -> Self:
+         return cast(Self, super().update())
+
+     def __add__(self, other: lsl.Var) -> Self:
+         self.value_node.add_inputs(other)
+         self.terms[other.name] = other
+         return self.update()
+
+     def __iadd__(self, other: lsl.Var) -> Self:
+         self.value_node.add_inputs(other)
+         self.terms[other.name] = other
+         return self.update()
+
+     def __getitem__(self, name) -> lsl.Var:
+         return self.terms[name]
+
+     def __repr__(self) -> str:
+         return f"{type(self).__name__}({self.name=}, {len(self.terms)} terms)"
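
A minimal, untested sketch of the intended usage (the names are illustrative): terms are accumulated with `+` or `+=`, the inverse link is applied to their sum, and terms remain retrievable by name:

```python
import jax.numpy as jnp

import liesel_gam as gam

eta = gam.AdditivePredictor("eta", inv_link=jnp.exp)
eta += gam.Intercept("b0", value=0.0)

print(eta.value)  # exp(0.0 + 0.0) = 1.0: the sum of terms is passed through inv_link
print(eta["b0"])  # terms can be looked up by name via __getitem__
```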
liesel_gam/roles.py ADDED
@@ -0,0 +1,9 @@
+ class Roles:
+     basis: str = "gam_basis"
+     coef_smooth: str = "gam_coef_smooth"
+     coef_linear: str = "gam_coef_linear"
+     variance_smooth: str = "gam_variance_smooth"
+     scale_smooth: str = "gam_scale_smooth"
+     term_smooth: str = "gam_term_smooth"
+     term_linear: str = "gam_term_linear"
+     intercept: str = "gam_intercept"
liesel_gam/var.py ADDED
@@ -0,0 +1,218 @@
+ from __future__ import annotations
+
+ from collections.abc import Callable
+ from typing import Any, Self
+
+ import jax
+ import jax.numpy as jnp
+ import liesel.goose as gs
+ import liesel.model as lsl
+ import tensorflow_probability.substrates.jax.distributions as tfd
+
+ from .dist import MultivariateNormalSingular
+ from .kernel import init_star_ig_gibbs
+ from .roles import Roles
+
+ InferenceTypes = Any
+ Array = Any
+
+
+ class SmoothTerm(lsl.Var):
+     def __init__(
+         self,
+         basis: Basis | lsl.Var,
+         penalty: lsl.Var | Array,
+         scale: lsl.Var,
+         name: str,
+         inference: InferenceTypes = None,
+         coef_name: str | None = None,
+     ):
+         coef_name = f"{name}_coef" if coef_name is None else coef_name
+
+         if jnp.asarray(basis.value).ndim != 2:
+             raise ValueError(f"basis must have 2 dimensions, got {basis.value.ndim}.")
+
+         nbases = jnp.shape(basis.value)[-1]
+
+         prior = lsl.Dist(
+             MultivariateNormalSingular,
+             loc=0.0,
+             scale=scale,
+             penalty=penalty,
+             penalty_rank=jnp.linalg.matrix_rank(penalty),
+         )
+
+         self.scale = scale
+         self.nbases = nbases
+         self.basis = basis
+         self.coef = lsl.Var.new_param(
+             jnp.zeros(nbases), prior, inference=inference, name=coef_name
+         )
+         calc = lsl.Calc(jnp.dot, basis, self.coef)
+
+         super().__init__(calc, name=name)
+         self.coef.update()
+         self.update()
+         self.coef.role = Roles.coef_smooth
+         self.role = Roles.term_smooth
+
+     @classmethod
+     def new_ig(
+         cls,
+         basis: Basis | lsl.Var,
+         penalty: Array,
+         name: str,
+         ig_concentration: float = 0.01,
+         ig_scale: float = 0.01,
+         inference: InferenceTypes = None,
+         variance_value: float | None = None,
+         variance_name: str | None = None,
+         variance_jitter_dist: tfd.Distribution | None = None,
+         coef_name: str | None = None,
+     ) -> Self:
+         variance_name = f"{name}_variance" if variance_name is None else variance_name
+
+         variance = lsl.Var.new_param(
+             value=1.0,
+             distribution=lsl.Dist(
+                 tfd.InverseGamma,
+                 concentration=ig_concentration,
+                 scale=ig_scale,
+             ),
+             name=variance_name,
+         )
+         variance.role = Roles.variance_smooth
+
+         scale = lsl.Var.new_calc(jnp.sqrt, variance, name=f"{variance_name}_root")
+         scale.role = Roles.scale_smooth
+
+         if variance_value is None:
+             ig_median = variance.dist_node.init_dist().quantile(0.5)  # type: ignore
+             variance.value = min(ig_median, 10.0)
+         else:
+             variance.value = variance_value
+
+         term = cls(
+             basis=basis,
+             scale=scale,
+             penalty=penalty,
+             inference=inference,
+             name=name,
+             coef_name=coef_name,
+         )
+
+         variance.inference = gs.MCMCSpec(
+             init_star_ig_gibbs,
+             kernel_kwargs={"coef": term.coef},
+             jitter_dist=variance_jitter_dist,
+         )
+
+         return term
+
+
+ class LinearTerm(lsl.Var):
+     def __init__(
+         self,
+         x: lsl.Var | Array,
+         name: str,
+         distribution: lsl.Dist | None = None,
+         inference: InferenceTypes = None,
+         add_intercept: bool = False,
+         coef_name: str | None = None,
+         basis_name: str | None = None,
+     ):
+         coef_name = f"{name}_coef" if coef_name is None else coef_name
+         basis_name = f"B({name})" if basis_name is None else basis_name
+
+         def _matrix(x):
+             x = jnp.atleast_1d(x)
+             if len(jnp.shape(x)) == 1:
+                 x = jnp.expand_dims(x, -1)
+             if add_intercept:
+                 ones = jnp.ones(x.shape[0])
+                 x = jnp.c_[ones, x]
+             return x
+
+         if not isinstance(x, lsl.Var):
+             x = lsl.Var.new_obs(x, name=f"{name}_input")
+
+         basis = lsl.Var(lsl.TransientCalc(_matrix, x=x), name=basis_name)
+         basis.role = Roles.basis
+
+         nbases = jnp.shape(basis.value)[-1]
+
+         self.nbases = nbases
+         self.basis = basis
+         self.coef = lsl.Var.new_param(
+             jnp.zeros(nbases), distribution, inference=inference, name=coef_name
+         )
+         calc = lsl.Calc(jnp.dot, basis, self.coef)
+
+         super().__init__(calc, name=name)
+         self.coef.role = Roles.coef_linear
+         self.role = Roles.term_linear
+
+
+ class Intercept(lsl.Var):
+     def __init__(
+         self,
+         name: str,
+         value: Array | float = 0.0,
+         distribution: lsl.Dist | None = None,
+         inference: InferenceTypes = None,
+     ) -> None:
+         super().__init__(
+             value=value, distribution=distribution, name=name, inference=inference
+         )
+         self.parameter = True
+         self.role = Roles.intercept
+
+
+ class Basis(lsl.Var):
+     def __init__(
+         self,
+         value: lsl.Var | lsl.Node,
+         basis_fn: Callable[[Array], Array] | Callable[..., Array],
+         *args,
+         name: str | None = None,
+         **kwargs,
+     ) -> None:
+         try:
+             value_ar = jnp.asarray(value.value)
+         except AttributeError:
+             raise TypeError(f"{value=} should be a liesel.model.Var instance.")
+
+         dtype = value_ar.dtype
+
+         input_shape = jnp.shape(basis_fn(value_ar, *args, **kwargs))
+         if len(input_shape):
+             k = input_shape[-1]
+
+         def fn(x):
+             n = jnp.shape(jnp.atleast_1d(x))[0]
+             if len(input_shape) == 2:
+                 shape = (n, k)
+             elif len(input_shape) == 1:
+                 shape = (n,)
+             elif not len(input_shape):
+                 shape = ()
+             else:
+                 raise RuntimeError(
+                     "Return shape of 'basis_fn(value)' must"
+                     f" have <= 2 dimensions, got {input_shape}."
+                 )
+             result_shape = jax.ShapeDtypeStruct(shape, dtype)
+             result = jax.pure_callback(
+                 basis_fn, result_shape, x, *args, vmap_method="sequential", **kwargs
+             )
+             return result
+
+         if not value.name:
+             raise ValueError(f"{value=} must be named.")
+
+         name_ = f"B({value.name})" if name is None else name
+
+         super().__init__(lsl.Calc(fn, value, _name=name_ + "_calc"), name=name_)
+         self.update()
+         self.role = Roles.basis
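
A minimal, untested sketch of `Basis` wrapping a non-JAX basis function via `jax.pure_callback` (the NumPy `poly_basis` below is made up for illustration):

```python
import numpy as np
import jax.numpy as jnp
import liesel.model as lsl

import liesel_gam as gam


def poly_basis(x, degree=3):
    # Plain NumPy function; Basis routes it through jax.pure_callback.
    x = np.asarray(x)
    return np.stack([x**d for d in range(1, degree + 1)], axis=-1)


x = lsl.Var.new_obs(jnp.linspace(0.0, 1.0, 50), name="x")
B = gam.Basis(x, poly_basis, degree=3)  # named "B(x)" by default
print(B.value.shape)  # (50, 3)
```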
liesel_gam-0.0.4.dist-info/METADATA ADDED
@@ -0,0 +1,160 @@
+ Metadata-Version: 2.4
+ Name: liesel_gam
+ Version: 0.0.4
+ Summary: Functionality for Generalized Additive Models in Liesel
+ Author: Johannes Brachem
+ License-File: LICENSE
+ Keywords: machine-learning,statistics
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.13
+ Requires-Dist: liesel>=0.4
+ Description-Content-Type: text/markdown
+
+ # Generalized Additive Models Functionality in Liesel
+
+ [![pre-commit](https://github.com/liesel-devs/liesel_gam/actions/workflows/pre-commit.yml/badge.svg)](https://github.com/liesel-devs/liesel_gam/actions/workflows/pre-commit.yml)
+ [![pytest](https://github.com/liesel-devs/liesel_gam/actions/workflows/pytest.yml/badge.svg)](https://github.com/liesel-devs/liesel_gam/actions/workflows/pytest.yml)
+ [![pytest-cov](tests/coverage.svg)](https://github.com/liesel-devs/liesel_gam/actions/workflows/pytest.yml)
+
+ This package makes it convenient to set up semiparametric generalized additive
+ distributional regression models in [Liesel](https://github.com/liesel-devs/liesel).
+ It works nicely with [liesel-devs/smoothcon](https://github.com/liesel-devs/smoothcon),
+ which can be used to obtain basis and penalty matrices from the R package [mgcv](https://cran.r-project.org/web/packages/mgcv/index.html).
+
+ ## Disclaimer
+
+ This package is experimental and under active development. That means:
+
+ - The API cannot be considered stable. If you depend on this package, pin the version.
+ - Testing has not been extensive so far. Please check and verify!
+ - There is currently no documentation beyond this readme.
+
+ In any case, this package comes with no warranty or guarantees.
+
+ ## Installation
+
+ You can install the development version from GitHub via pip:
+
+ ```bash
+ pip install git+https://github.com/liesel-devs/liesel_gam.git
+ ```
+
+ ## Illustration
+
+ This is a short pseudo-code illustration without real data. For full examples, please
+ see the [notebooks](https://github.com/liesel-devs/liesel_gam/blob/main/notebooks).
+
+ ```python
+ import liesel.model as lsl
+ import liesel.goose as gs
+
+ import liesel_gam as gam
+
+ import jax.numpy as jnp
+ ```
+
+ Set up the response model.
+
+ ```python
+ loc = gam.AdditivePredictor("loc")
+ scale = gam.AdditivePredictor("scale", inv_link=jnp.exp)  # terms will be added on the linked level
+
+ y = lsl.Var.new_obs(
+     value=...,
+     distribution=lsl.Dist(..., loc=loc, scale=scale),
+     name="y"
+ )
+ ```
+
+ Add intercept terms.
+
+ ```python
+ loc += gam.Intercept(
+     value=0.0,  # this is the default
+     distribution=None,  # this is the default
+     inference=gs.MCMCSpec(gs.IWLSKernel),  # supply inference information here
+     name="b0"
+ )
+
+ scale += gam.Intercept(  # this term will be applied on the log link level
+     value=0.0,
+     distribution=None,
+     inference=gs.MCMCSpec(gs.IWLSKernel),
+     name="g0"
+ )
+ ```
+
+ Add a smooth term, which can be any structured additive term defined by a basis matrix
+ and a penalty matrix. A potentially rank-deficient multivariate normal prior will
+ be set up for the coefficient of this term. A concrete sketch follows the
+ pseudo-code below.
+
+ ```python
+ loc += gam.SmoothTerm(
+     basis=...,
+     penalty=...,
+     scale=lsl.Var.new_param(..., name="tau"),
+     inference=gs.MCMCSpec(gs.IWLSKernel),
+     name="s(x)"
+ )
+ ```
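+
+ As a concrete, purely illustrative variant, the `gam.SmoothTerm.new_ig` constructor
+ sets up the same kind of term with an inverse gamma prior on the variance and a
+ matching Gibbs kernel. The identity basis and random-walk penalty below are
+ stand-ins, not real data:
+
+ ```python
+ import numpy as np
+
+ D = np.diff(np.eye(10), n=2, axis=0)  # second differences
+ K = jnp.asarray(D.T @ D)  # second-order random-walk penalty, rank 8
+
+ loc += gam.SmoothTerm.new_ig(
+     basis=lsl.Var.new_obs(jnp.eye(10), name="B(z)"),
+     penalty=K,
+     inference=gs.MCMCSpec(gs.IWLSKernel),
+     name="s(z)",
+ )
+ ```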
+
+ Add a linear term.
+
+ ```python
+ loc += gam.LinearTerm(
+     x=...,  # both 1d and 2d arrays are allowed
+     distribution=lsl.Dist(...),
+     inference=gs.MCMCSpec(gs.IWLSKernel),
+     name="x"
+ )
+ ```
+
+ Get a Liesel EngineBuilder instance to set up MCMC sampling.
+
+ ```python
+ model = lsl.Model([y])
+ eb = gs.LieselMCMC(model).get_engine_builder()  # get your engine builder instance
+ ```
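+
+ From here, sampling can proceed with the usual goose workflow. A rough, untested
+ sketch (the durations are illustrative):
+
+ ```python
+ eb.set_duration(warmup_duration=1000, posterior_duration=1000)
+ engine = eb.build()
+ engine.sample_all_epochs()
+
+ results = engine.get_results()
+ print(gs.Summary(results))
+ ```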
+
+ ## Contents
+
+ ```python
+ import liesel.model as lsl
+ import liesel.goose as gs
+
+ import liesel_gam as gam
+ ```
+
+ This package provides the following classes and functions:
+
+ - `gam.AdditivePredictor`: A `lsl.Var` object that provides a convenient way to define an additive predictor.
+ - `gam.SmoothTerm`: A `lsl.Var` object that provides a convenient way to set up a structured additive term with a singular multivariate normal prior, given a basis matrix, a penalty matrix, and a `lsl.Var` representing the prior scale parameter.
+   - The alternative constructor `gam.SmoothTerm.new_ig` can be used to quickly set up a term with an inverse gamma prior on the variance parameter. The variance parameter will be initialized with a suitable Gibbs kernel.
+ - `gam.LinearTerm`: A `lsl.Var` object that provides a convenient way to set up a linear term.
+ - `gam.Intercept`: A `lsl.Var` parameter object that represents an intercept.
+ - `gam.Basis`: An observed `lsl.Var` object that represents a basis matrix.
+
+ A bit more behind the scenes:
+
+ - `gam.MultivariateNormalSingular`: An implementation of the singular multivariate normal distribution in the `tensorflow_probability` interface.
+ - `gam.star_ig_gibbs` and `gam.init_star_ig_gibbs`: Shortcuts for setting up a `gs.GibbsKernel` for a variance parameter with an inverse gamma prior.
+
+ ## Usage
+
+ Usage is illustrated in the following notebooks.
+
+ - [notebooks/test_gam_gibbs.ipynb](https://github.com/liesel-devs/liesel_gam/blob/main/notebooks/test_gam_gibbs.ipynb): Uses the `gam.SmoothTerm.new_ig` constructor for the quickest and most convenient setup.
+ - [notebooks/test_gam_manual.ipynb](https://github.com/liesel-devs/liesel_gam/blob/main/notebooks/test_gam_manual.ipynb): Uses `gam.SmoothTerm` with a manually initialized scale parameter. This is less convenient, but demonstrates how to use any `lsl.Var` for the scale parameter.
+
+ ## Usage with bases and penalties from `mgcv` via `smoothcon`
+
+ We can access a large class of basis and penalty matrices by
+ interfacing with the wonderful R package [mgcv](https://cran.r-project.org/web/packages/mgcv/index.html)
+ via [liesel-devs/smoothcon](https://github.com/liesel-devs/smoothcon).
+
+ Example notebooks that illustrate smoothcon usage are provided in the [smoothcon
+ repository](https://github.com/liesel-devs/smoothcon/tree/main/notebooks).
liesel_gam-0.0.4.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+ liesel_gam/__about__.py,sha256=1mptEzQihbdyqqzMgdns_j5ZGK9gz7hR2bsgA_TnjO4,22
+ liesel_gam/__init__.py,sha256=BlkbwITQP5raDtDdtg69C5yZB_pdptVcwgE3uAJWvNQ,455
+ liesel_gam/dist.py,sha256=ZrxMjGRqNWWNGE2NAPAfvabEO6QeHteZaKFG-G6BFBI,3466
+ liesel_gam/kernel.py,sha256=x1cPHf8orgv_X1824GYvgjIYPLydljq2Gp3xEgvAMSE,1552
+ liesel_gam/predictor.py,sha256=SMfo7fybgAcYN9WqSyyCMmic_5GROUv6ui_aRzXdJwc,1473
+ liesel_gam/roles.py,sha256=eZeuZI5YccNzlrgqOR5ltREB4dRBV4k4afZt9701doM,335
+ liesel_gam/var.py,sha256=4-KEQLupQUok8ZWaiL-UF0eSRnXndeamtc3dyg6TbI0,6444
+ liesel_gam-0.0.4.dist-info/METADATA,sha256=y7ldCSSJaS-h9P0LoDKV2DMBteYisdBIjD5NpD2Ry64,6067
+ liesel_gam-0.0.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ liesel_gam-0.0.4.dist-info/licenses/LICENSE,sha256=pjhYbDHmDl8Gms9kI5nPaJoWte2QGB0F6Cwa1r9jsQ0,1063
+ liesel_gam-0.0.4.dist-info/RECORD,,
liesel_gam-0.0.4.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: hatchling 1.27.0
+ Root-Is-Purelib: true
+ Tag: py3-none-any
liesel_gam-0.0.4.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Liesel
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.