torchcurves 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. torchcurves-0.1.0/.github/workflows/build.yml +52 -0
  2. torchcurves-0.1.0/.github/workflows/tests.yml +44 -0
  3. torchcurves-0.1.0/.gitignore +65 -0
  4. torchcurves-0.1.0/.pre-commit-config.yaml +24 -0
  5. torchcurves-0.1.0/.readthedocs.yaml +25 -0
  6. torchcurves-0.1.0/LICENSE +201 -0
  7. torchcurves-0.1.0/PKG-INFO +289 -0
  8. torchcurves-0.1.0/README.md +263 -0
  9. torchcurves-0.1.0/doc/Makefile +20 -0
  10. torchcurves-0.1.0/doc/source/_templates/classtemplate.rst +9 -0
  11. torchcurves-0.1.0/doc/source/api.rst +6 -0
  12. torchcurves-0.1.0/doc/source/conf.py +56 -0
  13. torchcurves-0.1.0/doc/source/example_notebooks.rst +12 -0
  14. torchcurves-0.1.0/doc/source/examples/draw_bspline.ipynb +259 -0
  15. torchcurves-0.1.0/doc/source/examples/draw_legendre.ipynb +235 -0
  16. torchcurves-0.1.0/doc/source/examples/factorization_machine.ipynb +626 -0
  17. torchcurves-0.1.0/doc/source/examples/kan_bspline_rat.ipynb +334 -0
  18. torchcurves-0.1.0/doc/source/examples/kan_legendre_rat.ipynb +335 -0
  19. torchcurves-0.1.0/doc/source/examples/transformer_mixed_curves.ipynb +666 -0
  20. torchcurves-0.1.0/doc/source/index.rst +14 -0
  21. torchcurves-0.1.0/doc/source/torchcurves.functional.rst +38 -0
  22. torchcurves-0.1.0/doc/source/torchcurves.rst +27 -0
  23. torchcurves-0.1.0/logo.png +0 -0
  24. torchcurves-0.1.0/logo_small.png +0 -0
  25. torchcurves-0.1.0/pyproject.toml +87 -0
  26. torchcurves-0.1.0/src/torchcurves/__init__.py +5 -0
  27. torchcurves-0.1.0/src/torchcurves/functional/__init__.py +5 -0
  28. torchcurves-0.1.0/src/torchcurves/functional/_bspline.py +422 -0
  29. torchcurves-0.1.0/src/torchcurves/functional/_legendre.py +30 -0
  30. torchcurves-0.1.0/src/torchcurves/functional/_normalization.py +58 -0
  31. torchcurves-0.1.0/src/torchcurves/modules/__init__.py +5 -0
  32. torchcurves-0.1.0/src/torchcurves/modules/_bspline.py +181 -0
  33. torchcurves-0.1.0/src/torchcurves/modules/_kan_tools.py +18 -0
  34. torchcurves-0.1.0/src/torchcurves/modules/_legendre.py +100 -0
  35. torchcurves-0.1.0/src/torchcurves/modules/_normalization.py +7 -0
  36. torchcurves-0.1.0/src/torchcurves/types.py +28 -0
  37. torchcurves-0.1.0/tests/__init__.py +0 -0
  38. torchcurves-0.1.0/tests/conftest.py +0 -0
  39. torchcurves-0.1.0/tests/test_bspline.py +650 -0
  40. torchcurves-0.1.0/tests/test_legendre.py +133 -0
  41. torchcurves-0.1.0/uv.lock +3799 -0
@@ -0,0 +1,52 @@
1
+ name: Build
2
+
3
+ on:
4
+ push:
5
+ branches: [ master ]
6
+ pull_request:
7
+ branches: [ master ]
8
+
9
+ jobs:
10
+ build:
11
+ runs-on: ubuntu-latest
12
+ strategy:
13
+ matrix:
14
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+
18
+ - name: Install uv
19
+ uses: astral-sh/setup-uv@v3
20
+
21
+ - name: Set up Python ${{ matrix.python-version }}
22
+ run: uv python install ${{ matrix.python-version }}
23
+
24
+ - name: Install build dependencies
25
+ run: |
26
+ uv venv
27
+ uv sync --all-groups
28
+
29
+ - name: Build package
30
+ run: |
31
+ uv build
32
+
33
+ - name: Archive package
34
+ uses: actions/upload-artifact@v4
35
+ with:
36
+ name: wheel_and_source_${{ matrix.python-version }}
37
+ path: |
38
+ dist
39
+
40
+ - name: Build documentation
41
+ run: |
42
+ sudo apt-get -qq update
43
+ sudo apt-get install -y pandoc
44
+ cd doc
45
+ make html
46
+
47
+ - name: Archive documentation
48
+ uses: actions/upload-artifact@v4
49
+ with:
50
+ name: docs_${{ matrix.python-version }}
51
+ path: |
52
+ doc/build/html
@@ -0,0 +1,44 @@
1
+ name: Tests
2
+
3
+ on:
4
+ push:
5
+ branches: [ master ]
6
+ pull_request:
7
+ branches: [ master ]
8
+
9
+ jobs:
10
+ test:
11
+ runs-on: ubuntu-latest
12
+ strategy:
13
+ matrix:
14
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
15
+
16
+ steps:
17
+ - uses: actions/checkout@v4
18
+
19
+ - name: Install uv
20
+ uses: astral-sh/setup-uv@v3
21
+
22
+ - name: Set up Python ${{ matrix.python-version }}
23
+ run: uv python install ${{ matrix.python-version }}
24
+
25
+ - name: Install dependencies
26
+ run: |
27
+ uv venv
28
+ uv sync --all-groups
29
+
30
+ - name: Lint with ruff
31
+ run: |
32
+ uv run ruff check src/ tests/
33
+
34
+ - name: Format with black
35
+ run: |
36
+ uv run black --check --line-length 120 src/ tests/
37
+
38
+ - name: Type check with mypy
39
+ run: |
40
+ uv run mypy src/
41
+
42
+ - name: Test with pytest
43
+ run: |
44
+ uv run pytest tests/ -v --cov=torchcurves --cov-report=term-missing
@@ -0,0 +1,65 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.so
6
+ .Python
7
+ build/
8
+ develop-eggs/
9
+ dist/
10
+ downloads/
11
+ eggs/
12
+ .eggs/
13
+ lib/
14
+ lib64/
15
+ parts/
16
+ sdist/
17
+ var/
18
+ wheels/
19
+ *.egg-info/
20
+ .installed.cfg
21
+ *.egg
22
+ MANIFEST
23
+
24
+ # Virtual environments
25
+ venv/
26
+ ENV/
27
+ env/
28
+ .venv
29
+
30
+ # IDEs
31
+ .vscode/
32
+ .idea/
33
+ *.swp
34
+ *.swo
35
+ *~
36
+
37
+ # Testing
38
+ .tox/
39
+ .coverage
40
+ .coverage.*
41
+ .cache
42
+ .pytest_cache/
43
+ nosetests.xml
44
+ coverage.xml
45
+ *.cover
46
+ .hypothesis/
47
+
48
+ # Jupyter
49
+ .ipynb_checkpoints/
50
+
51
+ # Documentation
52
+ docs/_build/
53
+ docs/_static/
54
+ docs/_templates/
55
+
56
+ # OS
57
+ .DS_Store
58
+ Thumbs.db
59
+
60
+ # UV
61
+ .python-version
62
+ .ruff_cache
63
+
64
+ # Sphinx
65
+ doc/source/generated
@@ -0,0 +1,24 @@
1
+ repos:
2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: v4.5.0
4
+ hooks:
5
+ - id: trailing-whitespace
6
+ - id: end-of-file-fixer
7
+ - id: check-yaml
8
+ - id: check-added-large-files
9
+
10
+ - repo: https://github.com/astral-sh/ruff-pre-commit
11
+ # Ruff version.
12
+ rev: v0.9.3
13
+ hooks:
14
+ # Run the linter.
15
+ - id: ruff
16
+ types_or: [ python, pyi ]
17
+ args: [ --fix ]
18
+ # Run the formatter.
19
+ - id: ruff-format
20
+ types_or: [ python, pyi ]
21
+ - repo: https://github.com/astral-sh/uv-pre-commit
22
+ rev: 0.5.25
23
+ hooks:
24
+ - id: uv-lock
@@ -0,0 +1,25 @@
1
+ # Read the Docs configuration file
2
+ # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3
+
4
+ # Required
5
+ version: 2
6
+
7
+ # Set the OS, Python version, and other tools you might need
8
+ build:
9
+ os: ubuntu-24.04
10
+ tools:
11
+ python: "3.13"
12
+ apt_packages:
13
+ - pandoc
14
+ jobs:
15
+ pre_create_environment:
16
+ - asdf plugin add uv
17
+ - asdf install uv latest
18
+ - asdf global uv latest
19
+ create_environment:
20
+ - uv venv "${READTHEDOCS_VIRTUALENV_PATH}"
21
+ install:
22
+ - UV_PROJECT_ENVIRONMENT="${READTHEDOCS_VIRTUALENV_PATH}" uv sync --frozen --all-groups
23
+
24
+ sphinx:
25
+ configuration: doc/source/conf.py
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
@@ -0,0 +1,289 @@
1
+ Metadata-Version: 2.4
2
+ Name: torchcurves
3
+ Version: 0.1.0
4
+ Summary: PyTorch module for differentiable parametric curves with learnable coefficients
5
+ Project-URL: Homepage, https://github.com/alexshtf/torchcurves
6
+ Project-URL: Repository, https://github.com/alexshtf/torchcurves
7
+ Project-URL: Issues, https://github.com/alexshtf/torchcurves/issues
8
+ Author-email: Alex Shtoff <alex.shtf@gmail.com>
9
+ License: Apache 2.0
10
+ License-File: LICENSE
11
+ Keywords: bspline,curves,deep-learning,differentiable,geometric-deep-learning,pytorch
12
+ Classifier: Development Status :: 3 - Alpha
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Intended Audience :: Science/Research
15
+ Classifier: License :: OSI Approved :: Apache Software License
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
21
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
22
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
23
+ Requires-Python: >=3.9
24
+ Requires-Dist: torch>=1.10.0
25
+ Description-Content-Type: text/markdown
26
+
27
+ # torchcurves
28
+
29
+ ![logo](logo.png)
30
+
31
+ A PyTorch module for differentiable parametric curves with learnable coefficients,
32
+ such as a B-Spline curve with learnable control points.
33
+
34
+ This package provides fully differentiable curve implementations that integrate
35
+ seamlessly with PyTorch's autograd system. It streamlines use cases such as
36
+ continuous numerical embeddings for factorization machines [6] or transformers
37
+ [2,3], Kolmogorov-Arnold networks [1], or path planning in robotics.
38
+
39
+ Full documentation is available at
40
+ [torchcurves.readthedocs.io](https://torchcurves.readthedocs.io/en/latest/).
41
+
42
+ ## Features
43
+
44
+ - **Fully Differentiable**: Custom autograd function ensures gradients flow
45
+ properly through the curve evaluation.
46
+ - **Batch Processing**: Vectorized operations for efficient batch evaluation.
47
+
48
+ ## Installation
49
+
50
+ ```bash
51
+ pip install torchcurves
52
+ ```
53
+
54
+ ```bash
55
+ uv add torchcurves
56
+ ```
57
+
58
+ ## Use cases
59
+
60
+ There are examples in the `examples` directory showing how to build models using
61
+ this library. Here we show some simple code snippets to appreciate the library.
62
+
63
+ ## Use case 1 - continuous embeddings
64
+
65
+ ```python
66
+ import torchcurves as tc
67
+ from torch import nn
68
+ import torch
69
+
70
+
71
+ class Net(nn.Module):
72
+ def __init__(self, num_categorical, num_numerical, dim, num_knots=10):
73
+ super().__init__()
74
+ self.cat_emb = nn.Embedding(num_categorical, dim)
75
+ self.num_emb = tc.BSplineEmbeddings(num_numerical, dim, knots_config=num_knots)
76
+ self.my_super_duper_transformer = MySuperDuperTransformer()
77
+
78
+ def forward(self, x_categorical, x_numerical):
79
+ embeddings = torch.cat([self.cat_emb(x_categorical), self.num_emb(x_numerical)], dim=-2)
80
+ return self.my_super_duper_transformer(embeddings)
81
+ ```
82
+
83
+ ## Use case 2 - Kolmogorov-Arnold networks
84
+
85
+ A KAN [1] based on the B-Spline basis, along the lines of the original paper:
86
+
87
+ ```python
88
+ import torchcurves as tc
89
+ from torch import nn
90
+
91
+ input_dim = 2
92
+ intermediate_dim = 5
93
+ num_control_points = 10
94
+
95
+ kan = nn.Sequential(
96
+ # layer 1
97
+ tc.BSplineCurve(input_dim, intermediate_dim, knots_config=num_control_points),
98
+ tc.Sum(dim=-2),
99
+ # layer 2
100
+ tc.BSplineCurve(intermediate_dim, intermediate_dim, knots_config=num_control_points),
101
+ tc.Sum(dim=-2),
102
+ # layer 3
103
+ tc.BSplineCurve(intermediate_dim, 1, knots_config=num_control_points),
104
+ tc.Sum(dim=-2),
105
+ )
106
+ ```
107
+ Yes, we know the original KAN paper used a different curve parametrization,
108
+ B-Spline + arcsinh, but the whole point of this repo is showing that KAN
109
+ activations can be parametrized in arbitrary ways.
110
+
111
+ For example, here is a KAN based on Legendre polynomials of degree 5:
112
+
113
+ ```python
114
+ import torchcurves as tc
115
+ from torch import nn
116
+
117
+ input_dim = 2
118
+ intermediate_dim = 5
119
+ degree = 5
120
+
121
+ kan = nn.Sequential(
122
+ # layer 1
123
+ tc.LegendreCurve(input_dim, intermediate_dim, degree=degree),
124
+ tc.Sum(dim=-2),
125
+ # layer 2
126
+ tc.LegendreCurve(intermediate_dim, intermediate_dim, degree=degree),
127
+ tc.Sum(dim=-2),
128
+ # layer 3
129
+ tc.LegendreCurve(intermediate_dim, 1, degree=degree),
130
+ tc.Sum(dim=-2),
131
+ )
132
+ ```
133
+
134
+ Since KANs are the primary use case for the `tc.Sum()` layer, we can omit the `dim=-2` argument, but it is provided
135
+ here for clarity.
136
+
137
+ ## Advanced features
138
+
139
+ The curves we provide here typically rely on their inputs to lie in a compact
140
+ interval, typically [-1, 1]. Arbitrary inputs need to be normalized to this
141
+ interval. We provide two simple out-of-the-box normalization strategies
142
+ described below.
143
+
144
+ ## Rational scaling
145
+
146
+ This is the default strategy — this strategy computes
147
+
148
+ ```math
149
+ x \to \frac{x}{\sqrt{s^2 + x^2}},
150
+ ```
151
+
152
+ and is based on the paper
153
+ >Wang, Z.Q. and Guo, B.Y., 2004. Modified Legendre rational spectral method for the whole line. Journal of Computational Mathematics, pp.457-474.
154
+
155
+ In Python it looks like this:
156
+
157
+ ```python
158
+ tc.BSplineCurve(curve_dim, normalization_fn='rational', normalization_scale=s)
159
+ ```
160
+
161
+ ## Clamping
162
+
163
+ The inputs are simply clipped to [-1, 1] after scaling, i.e.
164
+
165
+ ```math
166
+ x \to \max(\min(1, x / s), -1)
167
+ ```
168
+
169
+ In Python it looks like this:
170
+
171
+ ```python
172
+ tc.BSplineCurve(curve_dim, normalization_fn='clamp', normalization_scale=s)
173
+ ```
174
+
175
+ ## Custom normalization
176
+
177
+ Provide a custom function that maps its input to the designated range after
178
+ scaling. Example:
179
+
180
+ ```python
181
+ def erf_clamp(x: Tensor, scale: float = 1, out_min: float = -1, out_max: float = 1) -> Tensor:
182
+ mapped = torch.special.erf(x / scale)
183
+ return ((mapped + 1) * (out_max - out_min)) / 2 + out_min
184
+
185
+ tc.BSplineCurve(curve_dim, normalization_fn=erf_clamp, normalization_scale=s)
186
+ ```
187
+
188
+ ## Example: B-Spline KAN with clamping
189
+
190
+ A KAN based on a clamp-normalized B-Spline basis with the default scale of $s=1$:
191
+
192
+ ```python
193
+ spline_kan = nn.Sequential(
194
+ # layer 1
195
+ tc.BSplineCurve(input_dim, intermediate_dim, knots_config=knots, normalization_fn='clamp'),
196
+ tc.Sum(),
197
+ # layer 2
198
+ tc.BSplineCurve(intermediate_dim, intermediate_dim, knots_config=knots, normalization_fn='clamp'),
199
+ tc.Sum(),
200
+ # layer 3
201
+ tc.BSplineCurve(intermediate_dim, 1, knots_config=knots, normalization_fn='clamp'),
202
+ tc.Sum(),
203
+ )
204
+ ```
205
+
206
+ ### Legendre KAN with clamping
207
+
208
+ ```python
209
+ import torchcurves as tc
210
+ from torch import nn
211
+
212
+ input_dim = 2
213
+ intermediate_dim = 5
214
+ degree = 5
215
+
216
+ config = dict(degree=degree, normalization_fn="clamp")
217
+ kan = nn.Sequential(
218
+ # layer 1
219
+ tc.LegendreCurve(input_dim, intermediate_dim, **config),
220
+ tc.Sum(),
221
+ # layer 2
222
+ tc.LegendreCurve(intermediate_dim, intermediate_dim, **config),
223
+ tc.Sum(),
224
+ # layer 3
225
+ tc.LegendreCurve(intermediate_dim, 1, **config),
226
+ tc.Sum(),
227
+ )
228
+ ```
229
+
230
+
231
+ ## Development
232
+
233
+ ## Development Installation
234
+
235
+ Using [uv](https://github.com/astral-sh/uv) (recommended):
236
+
237
+ ```bash
238
+ # Clone the repository
239
+ git clone https://github.com/alexshtf/torchcurves.git
240
+ cd torchcurves
241
+
242
+ # Create virtual environment and install
243
+ uv venv
244
+ uv sync --all-groups
245
+ ```
246
+
247
+ ## Running Tests
248
+
249
+ ```bash
250
+ # Run all tests
251
+ uv run pytest
252
+
253
+ # Run with coverage
254
+ uv run pytest --cov=torchcurves
255
+
256
+ # Run specific test file
257
+ uv run pytest tests/test_bspline.py -v
258
+ ```
259
+
260
+ ## Building the docs
261
+
262
+ ```bash
263
+ # Prepare API docs
264
+ cd doc
265
+ make html
266
+ ```
267
+
268
+ ## Citation
269
+
270
+ If you use this package in your research, please cite:
271
+
272
+ ```bibtex
273
+ @software{torchcurves,
274
+ author = {Shtoff, Alex},
275
+ title = {torchcurves: Differentiable Parametric Curves in PyTorch},
276
+ year = {2025},
277
+ publisher = {GitHub},
278
+ url = {https://github.com/alexshtf/torchcurves}
279
+ }
280
+ ```
281
+
282
+ ## References
283
+
284
+ [1]: Ziming Liu, Yixuan Wang, Sachin Vaidya, Fabian Ruehle, James Halverson, Marin Soljacic, Thomas Y. Hou, Max Tegmark. "KAN: Kolmogorov–Arnold Networks." *ICLR* (2025). \
285
+ [2]: Juergen Schmidhuber. "Learning to control fast-weight memories: An alternative to dynamic recurrent networks." *Neural Computation*, 4(1), pp.131-139. (1992) \
286
+ [3]: Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. Gomez, Łukasz Kaiser, and Illia Polosukhin. "Attention is all you need." *Advances in neural information processing systems* 30 (2017). \
287
+ [4]: Alex Shtoff, Elie Abboud, Rotem Stram, and Oren Somekh. "Function Basis Encoding of Numerical Features in Factorization Machines." *Transactions on Machine Learning Research*. \
288
+ [5]: Rügamer, David. "Scalable Higher-Order Tensor Product Spline Models." In *International Conference on Artificial Intelligence and Statistics*, pp. 1-9. PMLR, 2024. \
289
+ [6]: Steffen Rendle. "Factorization machines." In *2010 IEEE International conference on data mining*, pp. 995-1000. IEEE, 2010.