featomic-torch 0.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,6 @@
1
+ Guillaume Fraux
2
+ Philip Loche
3
+ Sergei Kliavinek
4
+ Kevin Kazuki Huguenin-Dumittan
5
+ Davide Tisi
6
+ Alexander Goscinski
@@ -0,0 +1,28 @@
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2023, featomic developers
4
+
5
+ Redistribution and use in source and binary forms, with or without
6
+ modification, are permitted provided that the following conditions are met:
7
+
8
+ 1. Redistributions of source code must retain the above copyright notice, this
9
+ list of conditions and the following disclaimer.
10
+
11
+ 2. Redistributions in binary form must reproduce the above copyright notice,
12
+ this list of conditions and the following disclaimer in the documentation
13
+ and/or other materials provided with the distribution.
14
+
15
+ 3. Neither the name of the copyright holder nor the names of its
16
+ contributors may be used to endorse or promote products derived from
17
+ this software without specific prior written permission.
18
+
19
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,7 @@
1
+ include featomic-torch-cxx-*.tar.gz
2
+ include git_version_info
3
+ include build-backend/backend.py
4
+
5
+ include pyproject.toml
6
+ include AUTHORS
7
+ include LICENSE
@@ -0,0 +1,36 @@
1
+ Metadata-Version: 2.1
2
+ Name: featomic-torch
3
+ Version: 0.6.0
4
+ Summary: TorchScript bindings to featomic
5
+ Author: Guillaume Fraux, Philip Loche, Sergei Kliavinek, Kevin Kazuki Huguenin-Dumittan, Davide Tisi, Alexander Goscinski
6
+ License: BSD-3-Clause
7
+ Project-URL: homepage, https://metatensor.github.io/featomic/latest/
8
+ Project-URL: documentation, https://metatensor.github.io/featomic/latest/
9
+ Project-URL: repository, https://github.com/metatensor/featomic
10
+ Keywords: computational science,machine learning,molecular modeling,atomistic representations,torch
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Science/Research
13
+ Classifier: License :: OSI Approved :: BSD License
14
+ Classifier: Operating System :: POSIX
15
+ Classifier: Operating System :: MacOS :: MacOS X
16
+ Classifier: Operating System :: Microsoft :: Windows
17
+ Classifier: Programming Language :: Python
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Topic :: Scientific/Engineering
20
+ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
21
+ Classifier: Topic :: Scientific/Engineering :: Chemistry
22
+ Classifier: Topic :: Scientific/Engineering :: Physics
23
+ Classifier: Topic :: Software Development :: Libraries
24
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
25
+ Requires-Python: >=3.9
26
+ Description-Content-Type: text/x-rst
27
+ License-File: LICENSE
28
+ License-File: AUTHORS
29
+ Requires-Dist: torch>=1.12
30
+ Requires-Dist: metatensor-torch<0.7.0,>=0.6.0
31
+ Requires-Dist: featomic<0.7.0,>=0.6.0
32
+
33
+ featomic-torch
34
+ ===============
35
+
36
+ This package contains the TorchScript bindings to featomic.
@@ -0,0 +1,4 @@
1
+ featomic-torch
2
+ ===============
3
+
4
+ This package contains the TorchScript bindings to featomic.
@@ -0,0 +1,47 @@
# this is a custom Python build backend wrapping setuptool's to add a build-time
# dependencies to featomic, using the local version if it exists, and otherwise
# falling back to the one on PyPI.
import os

from setuptools import build_meta


# ROOT is the directory containing the python package (one level above this
# build-backend/ directory); FEATOMIC_SRC is where a local checkout of the
# pure-python featomic package would live in the git repository layout
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
FEATOMIC_SRC = os.path.realpath(os.path.join(ROOT, "..", "featomic"))
FORCED_FEATOMIC_VERSION = os.environ.get("FEATOMIC_TORCH_BUILD_WITH_FEATOMIC_VERSION")

# set FEATOMIC_NO_LOCAL_DEPS=1 to ignore a local featomic checkout and always
# depend on the released package instead
FEATOMIC_NO_LOCAL_DEPS = os.environ.get("FEATOMIC_NO_LOCAL_DEPS", "0") == "1"

if FORCED_FEATOMIC_VERSION is not None:
    # force a specific version for featomic, this is used when checking the build
    # from a sdist on a non-released version
    FEATOMIC_DEP = f"featomic =={FORCED_FEATOMIC_VERSION}"

elif not FEATOMIC_NO_LOCAL_DEPS and os.path.exists(FEATOMIC_SRC):
    # we are building from a git checkout
    FEATOMIC_DEP = f"featomic @ file://{FEATOMIC_SRC}"
else:
    # we are building from a sdist
    FEATOMIC_DEP = "featomic >=0.6.0,<0.7.0"

# same mechanism for torch: an environment variable can pin the exact version
FORCED_TORCH_VERSION = os.environ.get("FEATOMIC_TORCH_BUILD_WITH_TORCH_VERSION")
if FORCED_TORCH_VERSION is not None:
    TORCH_DEP = f"torch =={FORCED_TORCH_VERSION}"
else:
    TORCH_DEP = "torch >=1.12"


# re-export the standard setuptools PEP 517 hooks unchanged; only
# `get_requires_for_build_wheel` (defined below) is customized
get_requires_for_build_sdist = build_meta.get_requires_for_build_sdist
prepare_metadata_for_build_wheel = build_meta.prepare_metadata_for_build_wheel
build_wheel = build_meta.build_wheel
build_sdist = build_meta.build_sdist
def get_requires_for_build_wheel(config_settings=None):
    """
    PEP 517 hook: extend setuptools' default build requirements with cmake,
    torch, metatensor-torch and the featomic dependency computed above.
    """
    extra_requirements = [
        "cmake",
        TORCH_DEP,
        "metatensor-torch >=0.6.0,<0.7.0",
        FEATOMIC_DEP,
    ]
    base_requirements = build_meta.get_requires_for_build_wheel(config_settings)
    return base_requirements + extra_requirements
@@ -0,0 +1,39 @@
import importlib.metadata


# version of the installed featomic-torch distribution
__version__ = importlib.metadata.version("featomic-torch")


from ._c_lib import _load_library


# load the featomic_torch shared library (registering the TorchScript
# operators and classes) as soon as this package is imported
_load_library()

from . import utils  # noqa: E402, F401
from .calculator_base import CalculatorModule, register_autograd  # noqa: E402, F401

# don't forget to also update `featomic/__init__.py` and
# `featomic/torch/calculators.py` when modifying this file
from .calculators import (  # noqa: E402, F401
    AtomicComposition,
    LodeSphericalExpansion,
    NeighborList,
    SoapPowerSpectrum,
    SoapRadialSpectrum,
    SortedDistances,
    SphericalExpansion,
    SphericalExpansionByPair,
)
from .system import systems_to_torch  # noqa: E402, F401


__all__ = [
    "AtomicComposition",
    "LodeSphericalExpansion",
    "NeighborList",
    "SoapPowerSpectrum",
    "SoapRadialSpectrum",
    "SortedDistances",
    "SphericalExpansion",
    "SphericalExpansionByPair",
]
@@ -0,0 +1,138 @@
1
+ import glob
2
+ import os
3
+ import re
4
+ import sys
5
+ from collections import namedtuple
6
+
7
+ import metatensor.torch
8
+ import torch
9
+
10
+ import featomic
11
+
12
+ from ._build_versions import BUILD_FEATOMIC_VERSION
13
+
14
+
# a parsed semantic version, e.g. Version(major=0, minor=6, patch=0)
Version = namedtuple("Version", ["major", "minor", "patch"])


def parse_version(version):
    """
    Parse the leading ``major.minor.patch`` part of ``version`` into a
    :py:class:`Version`, ignoring any trailing suffix (e.g. ``+cu118``).

    :raises ValueError: if the string does not start with three dot-separated
        integers
    """
    parsed = re.match(r"(\d+)\.(\d+)\.(\d+).*", version)
    if parsed is None:
        raise ValueError("Invalid version string format")
    major, minor, patch = parsed.groups()
    return Version(int(major), int(minor), int(patch))


def version_compatible(actual, required):
    """
    Check ABI compatibility between two version strings: the versions are
    compatible when their major and minor numbers agree (patch is ignored).
    """
    actual = parse_version(actual)
    required = parse_version(required)
    return (actual.major, actual.minor) == (required.major, required.minor)
36
+
37
+
# fail fast at import time if the featomic python package loaded in this
# process does not match the version this extension was compiled against
# (major.minor must agree, see `version_compatible` above)
if not version_compatible(featomic.__version__, BUILD_FEATOMIC_VERSION):
    raise ImportError(
        f"Trying to load featomic-torch with featomic v{featomic.__version__}, "
        f"but it was compiled against featomic v{BUILD_FEATOMIC_VERSION}, which "
        "is not ABI compatible"
    )

# directory containing this file; the per-torch-version shared libraries are
# installed in subdirectories next to it
_HERE = os.path.realpath(os.path.dirname(__file__))
46
+
47
+
def _lib_path():
    """
    Return the path to the ``featomic_torch`` shared library matching the
    currently imported torch version.

    :raises ImportError: on unknown platforms, when the library file is
        missing, or when none of the bundled builds matches the running torch
        version
    """
    # shared libraries are installed in per-version prefixes, e.g. `torch-2.1/`
    torch_version = parse_version(torch.__version__)
    install_prefix = os.path.join(
        _HERE, f"torch-{torch_version.major}.{torch_version.minor}"
    )

    if os.path.exists(install_prefix):
        if sys.platform.startswith("darwin"):
            path = os.path.join(install_prefix, "lib", "libfeatomic_torch.dylib")
            windows = False
        elif sys.platform.startswith("linux"):
            path = os.path.join(install_prefix, "lib", "libfeatomic_torch.so")
            windows = False
        elif sys.platform.startswith("win"):
            path = os.path.join(install_prefix, "bin", "featomic_torch.dll")
            windows = True
        else:
            raise ImportError("Unknown platform. Please edit this file")

        if os.path.isfile(path):
            if windows:
                # make sure the DLL pointer size matches this Python build
                _check_dll(path)
            return path
        else:
            raise ImportError("Could not find featomic_torch shared library at " + path)

    # gather which torch version(s) the current install was built
    # with to create the error message
    existing_versions = []
    for prefix in glob.glob(os.path.join(_HERE, "torch-*")):
        # strip the leading "torch-" from the directory name
        existing_versions.append(os.path.basename(prefix)[6:])

    if len(existing_versions) == 1:
        raise ImportError(
            f"Trying to load featomic-torch with torch v{torch.__version__}, "
            f"but it was compiled against torch v{existing_versions[0]}, which "
            "is not ABI compatible"
        )
    else:
        all_versions = ", ".join(map(lambda version: f"v{version}", existing_versions))
        raise ImportError(
            f"Trying to load featomic-torch with torch v{torch.__version__}, "
            f"we found builds for torch {all_versions}; which are not ABI compatible.\n"
            "You can try to re-install from source with "
            "`pip install featomic-torch --no-binary=featomic-torch`"
        )
94
+
95
+
def _check_dll(path):
    """
    Check if the DLL pointer size matches Python (32-bit or 64-bit)

    :param path: path to the DLL file to inspect
    :raises ImportError: if the file is not a DLL, or if its architecture does
        not match the running Python interpreter
    """
    import platform
    import struct

    # machine values from the PE/COFF IMAGE_FILE_HEADER
    IMAGE_FILE_MACHINE_I386 = 332
    IMAGE_FILE_MACHINE_AMD64 = 34404

    machine = None
    with open(path, "rb") as fd:
        # compare the raw bytes: decoding as strict UTF-8 (as done previously)
        # could raise UnicodeDecodeError for arbitrary binary files instead of
        # the intended ImportError
        header = fd.read(2)
        if header != b"MZ":
            raise ImportError(path + " is not a DLL")
        else:
            # offset 60 of the DOS header holds the PE header location
            fd.seek(60)
            header = fd.read(4)
            header_offset = struct.unpack("<L", header)[0]
            # the machine field is 4 bytes into the PE header
            fd.seek(header_offset + 4)
            header = fd.read(2)
            machine = struct.unpack("<H", header)[0]

    arch = platform.architecture()[0]
    if arch == "32bit":
        if machine != IMAGE_FILE_MACHINE_I386:
            raise ImportError("Python is 32-bit, but this DLL is not")
    elif arch == "64bit":
        if machine != IMAGE_FILE_MACHINE_AMD64:
            raise ImportError("Python is 64-bit, but this DLL is not")
    else:
        raise ImportError("Could not determine pointer size of Python")
128
+
129
+
def _load_library():
    """
    Load the ``featomic_torch`` shared library in the current process,
    registering its TorchScript operators and custom classes.
    """
    # Load featomic & metatensor-torch shared library in the process first, to ensure
    # the featomic_torch shared library can find them
    metatensor.torch._c_lib._load_library()

    featomic._c_lib._get_library()

    # load the C++ operators and custom classes
    torch.ops.load_library(_lib_path())
@@ -0,0 +1,176 @@
1
+ from typing import List, Optional, Union
2
+
3
+ import torch
4
+ from metatensor.torch import Labels, TensorMap
5
+ from metatensor.torch.atomistic import NeighborListOptions
6
+
7
+ from .system import System
8
+
9
+
10
+ CalculatorHolder = torch.classes.featomic.CalculatorHolder
11
+
12
+
def register_autograd(
    systems: Union[List[System], System],
    precomputed: TensorMap,
    forward_gradients: Optional[List[str]] = None,
) -> TensorMap:
    """
    Register autograd nodes between ``system.positions`` and ``system.cell`` for each of
    the systems and the values in the ``precomputed``
    :py:class:`metatensor.torch.TensorMap`.

    This is an advanced function that most users should not need to use.

    The autograd nodes ``backward`` function will use the gradients already stored in
    ``precomputed``, meaning that if any of the system's positions ``requires_grad``,
    ``precomputed`` must contain ``"positions"`` gradients. Similarly, if any of the
    system's cell ``requires_grad``, ``precomputed`` must contain ``"cell"`` gradients.

    :param systems: list of system used to compute ``precomputed``
    :param precomputed: precomputed :py:class:`metatensor.torch.TensorMap`
    :param forward_gradients: which gradients to keep in the output, defaults to None
    """
    gradients = [] if forward_gradients is None else forward_gradients
    system_list = systems if isinstance(systems, list) else [systems]
    return torch.ops.featomic.register_autograd(system_list, precomputed, gradients)
41
+
42
+
class CalculatorModule(torch.nn.Module):
    """
    This is the base class for calculators in featomic-torch, providing the
    :py:meth:`CalculatorModule.compute` function and integration with
    :py:class:`torch.nn.Module`.

    One can initialize a py:class:`CalculatorModule` in two ways: either directly with
    the registered name and JSON parameter string (which are documented in the
    :ref:`userdoc-calculators`); or through one of the child class documented below.

    :param name: name used to register this calculator
    :param parameters: JSON parameter string for the calculator
    """

    def __init__(self, name: str, parameters: str):
        """"""
        # empty docstring here for the docs to render correctly
        super().__init__()
        self._c_name = name
        # the actual computation is delegated to this TorchScript custom class
        self._c = CalculatorHolder(name=name, parameters=parameters)

    @property
    def name(self) -> str:
        """name of this calculator"""
        return self._c.name

    @property
    def c_name(self) -> str:
        """name used to register & create this calculator"""
        return self._c_name

    @property
    def parameters(self) -> str:
        """parameters (formatted as JSON) used to create this calculator"""
        return self._c.parameters

    @property
    def cutoffs(self) -> List[float]:
        """all the radial cutoffs used by this calculator's neighbors lists"""
        return self._c.cutoffs

    def requested_neighbor_lists(self) -> List[NeighborListOptions]:
        """one set of neighbor list options per radial cutoff of this calculator"""
        options = []
        for cutoff in self.cutoffs:
            options.append(
                NeighborListOptions(
                    cutoff=cutoff,
                    full_list=False,
                    # we will re-filter the NL when converting to featomic internal
                    # type, so we don't need the engine to pre-filter it for us
                    strict=False,
                    requestor="featomic",
                )
            )
        return options

    def compute(
        self,
        systems: Union[System, List[System]],
        gradients: Optional[List[str]] = None,
        use_native_system: bool = True,
        selected_samples: Optional[Union[Labels, TensorMap]] = None,
        selected_properties: Optional[Union[Labels, TensorMap]] = None,
        selected_keys: Optional[Labels] = None,
    ) -> TensorMap:
        """Runs a calculation with this calculator on the given ``systems``.

        .. seealso::

            :py:func:`featomic.calculators.CalculatorBase.compute` for more information
            on the different parameters of this function.

        :param systems: single system or list of systems on which to run the
            calculation. If any of the systems' ``positions`` or ``cell`` has
            ``requires_grad`` set to ``True``, then the corresponding gradients are
            computed and registered as a custom node in the computational graph, to
            allow backward propagation of the gradients later.

        :param gradients: List of forward gradients to keep in the output. If this is
            ``None`` or an empty list ``[]``, no gradients are kept in the output. Some
            gradients might still be computed at runtime to allow for backward
            propagation.

        :param use_native_system: This can only be ``True``, and is here for
            compatibility with the same parameter on
            :py:meth:`featomic.calculators.CalculatorBase.compute`.

        :param selected_samples: Set of samples on which to run the calculation, with
            the same meaning as in
            :py:func:`featomic.calculators.CalculatorBase.compute`.

        :param selected_properties: Set of properties to compute, with the same meaning
            as in :py:func:`featomic.calculators.CalculatorBase.compute`.

        :param selected_keys: Selection for the keys to include in the output, with the
            same meaning as in :py:func:`featomic.calculators.CalculatorBase.compute`.
        """
        if gradients is None:
            gradients = []

        # normalize a single system to a one-element list
        if not isinstance(systems, list):
            systems = [systems]

        # We have this parameter to have the same API as featomic.
        if not use_native_system:
            raise ValueError("only `use_native_system=True` is supported")

        # pack all selection/gradient settings into the TorchScript options class
        options = torch.classes.featomic.CalculatorOptions()
        options.gradients = gradients
        options.selected_samples = selected_samples
        options.selected_properties = selected_properties
        options.selected_keys = selected_keys

        return self._c.compute(systems=systems, options=options)

    def forward(
        self,
        systems: List[System],
        gradients: Optional[List[str]] = None,
        use_native_system: bool = True,
        selected_samples: Optional[Union[Labels, TensorMap]] = None,
        selected_properties: Optional[Union[Labels, TensorMap]] = None,
        selected_keys: Optional[Labels] = None,
    ) -> TensorMap:
        """forward just calls :py:meth:`CalculatorModule.compute`"""

        return self.compute(
            systems=systems,
            gradients=gradients,
            use_native_system=use_native_system,
            selected_samples=selected_samples,
            selected_properties=selected_properties,
            selected_keys=selected_keys,
        )
@@ -0,0 +1,52 @@
import importlib
import sys

import featomic.calculators

from .calculator_base import CalculatorModule


# CAREFUL ADVENTURER, HERE BE DRAGONS!
#
#                                  \||/
#                                  |  @___oo
#                        /\  /\   / (__,,,,|
#                      ) /^\) ^\/ _)
#                      )   /^\/   _)
#                      )   _ /  / _)
#                  /\  )/\/ ||  | )_)
#                <  >      |(,,) )__)
#                 ||      /    \)___)\
#                 | \____(      )___) )___
#                  \______(_______;;; __;;;
#
#
# This module tries to re-use code from `featomic.calculators`, which contains a more
# user-friendly interface to the different calculators. At the C-API level calculators
# are just defined by a name & JSON parameter string. `featomic.calculators` defines
# one class for each name and set the `__init__` parameters with the top-level keys of
# the JSON parameters.
#
# To achieve this, we import the module in a special mode with `importlib`, defining a
# global variable `CalculatorBase` which is pointing to `CalculatorModule`. Then,
# `featomic.calculators` checks if `CalculatorBase` is defined and otherwise imports it
# from `featomic.calculator_base`.
#
# This means the same code is used to define two versions of each class: one will be
# used in `featomic` and have a base class of `featomic.CalculatorBase`, and one in
# `featomic.torch` with base classes `featomic.torch.CalculatorModule` and
# `torch.nn.Module`.


spec = importlib.util.spec_from_file_location(
    # create a module with this name
    "featomic.torch.calculators",
    # using the code from there
    featomic.calculators.__file__,
)
module = importlib.util.module_from_spec(spec)
# register before executing, so imports inside the module resolve to it
sys.modules[spec.name] = module

# inject the torch base class before running the module code (see the
# explanation above)
module.__dict__["CalculatorBase"] = CalculatorModule

spec.loader.exec_module(module)
@@ -0,0 +1,89 @@
import importlib
import os
import sys
from typing import Any

import metatensor.torch
import torch

import featomic.utils

from .calculator_base import CalculatorModule
from .system import System


# For details on what is happening here, take a look at
# `featomic.torch.calculators`.

# create the `_backend` module as an empty module
spec = importlib.util.spec_from_loader(
    "featomic.torch.clebsch_gordan._backend",
    loader=None,
)
module = importlib.util.module_from_spec(spec)
# This module only exposes a handful of things, defined here. Any changes here MUST also
# be made to the `featomic/clebsch_gordan/_backend.py` file, which is used in
# non-TorchScript mode.
module.__dict__["BACKEND_IS_METATENSOR_TORCH"] = True

# metatensor data types, in their TorchScript-compatible incarnation
module.__dict__["Labels"] = metatensor.torch.Labels
module.__dict__["TensorBlock"] = metatensor.torch.TensorBlock
module.__dict__["TensorMap"] = metatensor.torch.TensorMap
module.__dict__["LabelsEntry"] = metatensor.torch.LabelsEntry

module.__dict__["CalculatorBase"] = CalculatorModule
module.__dict__["IntoSystem"] = System

# torch-flavored aliases for the generic names used by the shared code
module.__dict__["TorchTensor"] = torch.Tensor
module.__dict__["TorchModule"] = torch.nn.Module
module.__dict__["TorchScriptClass"] = torch.ScriptClass
module.__dict__["Array"] = torch.Tensor
module.__dict__["DType"] = torch.dtype
module.__dict__["Device"] = torch.device

module.__dict__["torch_jit_is_scripting"] = torch.jit.is_scripting
module.__dict__["torch_jit_export"] = torch.jit.export

if os.environ.get("METATENSOR_IMPORT_FOR_SPHINX", "0") == "0":
    module.__dict__["operations"] = metatensor.torch.operations
else:
    # FIXME: we can remove this hack once metatensor-operations v0.2.4 is released
    module.__dict__["operations"] = None


def is_labels(obj: Any):
    # runtime type-check helper; TorchScript-compiled below unless building docs
    return isinstance(obj, metatensor.torch.Labels)


if os.environ.get("FEATOMIC_IMPORT_FOR_SPHINX") is None:
    is_labels = torch.jit.script(is_labels)

module.__dict__["is_labels"] = is_labels


def check_isinstance(obj, ty):
    # isinstance() replacement that also accepts TorchScript custom classes
    if isinstance(ty, torch.ScriptClass):
        # This branch is taken when `ty` is a custom class (TensorMap, …). since `ty` is
        # an instance of `torch.ScriptClass` and not a class itself, there is no way to
        # check if obj is an "instance" of this class, so we always return True and hope
        # for the best. Most errors should be caught by the TorchScript compiler anyway.
        return True
    else:
        assert isinstance(ty, type)
        return isinstance(obj, ty)


# register the module in sys.modules, so future import find it directly
sys.modules[spec.name] = module

# create a module named `featomic.torch.clebsch_gordan` using code from
# `featomic.clebsch_gordan`
spec = importlib.util.spec_from_file_location(
    "featomic.torch.clebsch_gordan", featomic.clebsch_gordan.__file__
)

module = importlib.util.module_from_spec(spec)

# override `featomic.torch.clebsch_gordan` (the module associated with the current file)
# with the newly created module
sys.modules[spec.name] = module
spec.loader.exec_module(module)
@@ -0,0 +1,106 @@
1
+ from typing import List, Optional, Sequence, overload
2
+
3
+ import numpy as np
4
+ import torch
5
+ from metatensor.torch.atomistic import System
6
+
7
+ import featomic
8
+
9
+
10
+ @overload
11
+ def systems_to_torch(
12
+ systems: featomic.systems.IntoSystem,
13
+ positions_requires_grad: Optional[bool] = None,
14
+ cell_requires_grad: Optional[bool] = None,
15
+ ) -> System:
16
+ pass
17
+
18
+
19
+ @overload
20
+ def systems_to_torch(
21
+ systems: Sequence[featomic.systems.IntoSystem],
22
+ positions_requires_grad: Optional[bool] = None,
23
+ cell_requires_grad: Optional[bool] = None,
24
+ ) -> List[System]:
25
+ pass
26
+
27
+
28
+ def systems_to_torch(
29
+ systems,
30
+ positions_requires_grad=None,
31
+ cell_requires_grad=None,
32
+ ) -> List[System]:
33
+ """
34
+ Convert a arbitrary system to metatensor's atomistic
35
+ :py:class:`metatensor.torch.atomistic.System`, putting all the data in
36
+ :py:class:`torch.Tensor` and making the overall object compatible with TorchScript.
37
+
38
+ :param system: any system supported by featomic. If this is an iterable of system,
39
+ this function converts them all and returns a list of converted systems.
40
+
41
+ :param positions_requires_grad: The value of ``requires_grad`` on the output
42
+ ``positions``. If ``None`` and the positions of the input is already a
43
+ :py:class:`torch.Tensor`, ``requires_grad`` is kept the same. Otherwise it is
44
+ initialized to ``False``.
45
+
46
+ :param cell_requires_grad: The value of ``requires_grad`` on the output ``cell``. If
47
+ ``None`` and the positions of the input is already a :py:class:`torch.Tensor`,
48
+ ``requires_grad`` is kept the same. Otherwise it is initialized to ``False``.
49
+ """
50
+
51
+ try:
52
+ return _system_to_torch(systems, positions_requires_grad, cell_requires_grad)
53
+ except TypeError:
54
+ # try iterating over the systems
55
+ return [
56
+ _system_to_torch(system, positions_requires_grad, cell_requires_grad)
57
+ for system in systems
58
+ ]
59
+
60
+
61
def _system_to_torch(system, positions_requires_grad, cell_requires_grad):
    """Convert a single system to a metatensor atomistic ``System``."""
    if not _is_torch_system(system):
        system = featomic.systems.wrap_system(system)
        system = System(
            types=torch.tensor(system.types()),
            positions=torch.tensor(system.positions()),
            cell=torch.tensor(system.cell()),
            # an all-zero cell is treated as "no periodicity"
            pbc=(
                torch.tensor([False, False, False])
                if np.all(system.cell() == 0.0)
                else torch.tensor([True, True, True])
            ),
        )

    # override requires_grad only when explicitly requested, otherwise keep
    # whatever the input tensors already had
    if positions_requires_grad is not None:
        system.positions.requires_grad_(positions_requires_grad)

    if cell_requires_grad is not None:
        system.cell.requires_grad_(cell_requires_grad)

    return system
82
+
83
+
def _is_torch_system(system):
    """
    Check whether ``system`` already is a metatensor atomistic ``System``
    TorchScript object (as opposed to some other system type featomic can wrap).
    """
    if not isinstance(system, torch.ScriptObject):
        return False

    major_minor = tuple(map(int, torch.__version__.split(".")[:2]))
    if major_minor >= (2, 1):
        # recent torch exposes the custom class name directly
        return system._type().name() == "System"

    # For older torch version, we check that we have the right properties
    properties = system._properties()
    if len(properties) != 3:
        return False

    for actual, expected in zip(properties, ("species", "positions", "cell")):
        if actual.name != expected:
            return False

    return True
@@ -0,0 +1,19 @@
import os

import torch

from ._c_lib import parse_version


# directory containing this file
_HERE = os.path.dirname(__file__)


# the CMake files live in a per-torch-version prefix, matching the shared
# library layout used by `_c_lib._lib_path`
_TORCH_VERSION = parse_version(torch.__version__)
install_prefix = os.path.join(
    _HERE, f"torch-{_TORCH_VERSION.major}.{_TORCH_VERSION.minor}"
)

cmake_prefix_path = os.path.join(install_prefix, "lib", "cmake")
"""
Path containing the CMake configuration files for the underlying C++ library
"""
@@ -0,0 +1,36 @@
1
+ Metadata-Version: 2.1
2
+ Name: featomic-torch
3
+ Version: 0.6.0
4
+ Summary: TorchScript bindings to featomic
5
+ Author: Guillaume Fraux, Philip Loche, Sergei Kliavinek, Kevin Kazuki Huguenin-Dumittan, Davide Tisi, Alexander Goscinski
6
+ License: BSD-3-Clause
7
+ Project-URL: homepage, https://metatensor.github.io/featomic/latest/
8
+ Project-URL: documentation, https://metatensor.github.io/featomic/latest/
9
+ Project-URL: repository, https://github.com/metatensor/featomic
10
+ Keywords: computational science,machine learning,molecular modeling,atomistic representations,torch
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Science/Research
13
+ Classifier: License :: OSI Approved :: BSD License
14
+ Classifier: Operating System :: POSIX
15
+ Classifier: Operating System :: MacOS :: MacOS X
16
+ Classifier: Operating System :: Microsoft :: Windows
17
+ Classifier: Programming Language :: Python
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Topic :: Scientific/Engineering
20
+ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
21
+ Classifier: Topic :: Scientific/Engineering :: Chemistry
22
+ Classifier: Topic :: Scientific/Engineering :: Physics
23
+ Classifier: Topic :: Software Development :: Libraries
24
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
25
+ Requires-Python: >=3.9
26
+ Description-Content-Type: text/x-rst
27
+ License-File: LICENSE
28
+ License-File: AUTHORS
29
+ Requires-Dist: torch>=1.12
30
+ Requires-Dist: metatensor-torch<0.7.0,>=0.6.0
31
+ Requires-Dist: featomic<0.7.0,>=0.6.0
32
+
33
+ featomic-torch
34
+ ===============
35
+
36
+ This package contains the TorchScript bindings to featomic.
@@ -0,0 +1,22 @@
1
+ AUTHORS
2
+ LICENSE
3
+ MANIFEST.in
4
+ README.rst
5
+ featomic-torch-cxx-0.6.0.tar.gz
6
+ git_version_info
7
+ pyproject.toml
8
+ setup.py
9
+ build-backend/backend.py
10
+ featomic/torch/__init__.py
11
+ featomic/torch/_c_lib.py
12
+ featomic/torch/calculator_base.py
13
+ featomic/torch/calculators.py
14
+ featomic/torch/clebsch_gordan.py
15
+ featomic/torch/system.py
16
+ featomic/torch/utils.py
17
+ featomic_torch.egg-info/PKG-INFO
18
+ featomic_torch.egg-info/SOURCES.txt
19
+ featomic_torch.egg-info/dependency_links.txt
20
+ featomic_torch.egg-info/not-zip-safe
21
+ featomic_torch.egg-info/requires.txt
22
+ featomic_torch.egg-info/top_level.txt
@@ -0,0 +1,3 @@
1
+ torch>=1.12
2
+ metatensor-torch<0.7.0,>=0.6.0
3
+ featomic<0.7.0,>=0.6.0
@@ -0,0 +1,2 @@
1
+ featomic
2
+ featomic_torch
@@ -0,0 +1,2 @@
1
+ 0
2
+ git.575b7a8
@@ -0,0 +1,62 @@
1
+ [project]
2
+ name = "featomic-torch"
3
+ dynamic = ["version", "authors", "dependencies"]
4
+ requires-python = ">=3.9"
5
+
6
+ readme = "README.rst"
7
+ license = {text = "BSD-3-Clause"}
8
+ description = "TorchScript bindings to featomic"
9
+
10
+ keywords = ["computational science", "machine learning", "molecular modeling", "atomistic representations", "torch"]
11
+ classifiers = [
12
+ "Development Status :: 4 - Beta",
13
+ "Intended Audience :: Science/Research",
14
+ "License :: OSI Approved :: BSD License",
15
+ "Operating System :: POSIX",
16
+ "Operating System :: MacOS :: MacOS X",
17
+ "Operating System :: Microsoft :: Windows",
18
+ "Programming Language :: Python",
19
+ "Programming Language :: Python :: 3",
20
+ "Topic :: Scientific/Engineering",
21
+ "Topic :: Scientific/Engineering :: Bio-Informatics",
22
+ "Topic :: Scientific/Engineering :: Chemistry",
23
+ "Topic :: Scientific/Engineering :: Physics",
24
+ "Topic :: Software Development :: Libraries",
25
+ "Topic :: Software Development :: Libraries :: Python Modules",
26
+ ]
27
+
28
+ [project.urls]
29
+ homepage = "https://metatensor.github.io/featomic/latest/"
30
+ documentation = "https://metatensor.github.io/featomic/latest/"
31
+ repository = "https://github.com/metatensor/featomic"
32
+ # changelog = "TODO"
33
+
34
+ ### ======================================================================== ###
35
+ [build-system]
36
+ requires = [
37
+ "setuptools",
38
+ "wheel",
39
+ "packaging",
40
+ ]
41
+
42
+ # use a custom build backend to add a dependency on the right version of featomic
43
+ build-backend = "backend"
44
+ backend-path = ["build-backend"]
45
+
46
+ [tool.setuptools]
47
+ zip-safe = false
48
+
49
+ [tool.setuptools.packages.find]
50
+ include = ["featomic*"]
51
+ namespaces = true
52
+
53
+ ### ======================================================================== ###
54
+
55
+ [tool.pytest.ini_options]
56
+ python_files = ["*.py"]
57
+ testpaths = ["tests"]
58
+
59
+ ### ======================================================================== ###
60
+
61
+ [tool.uv.pip]
62
+ reinstall-package = ["featomic", "featomic-torch"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,379 @@
1
+ import glob
2
+ import os
3
+ import subprocess
4
+ import sys
5
+
6
+ import packaging
7
+ from setuptools import Extension, setup
8
+ from setuptools.command.bdist_egg import bdist_egg
9
+ from setuptools.command.build_ext import build_ext
10
+ from setuptools.command.sdist import sdist
11
+ from wheel.bdist_wheel import bdist_wheel
12
+
13
+
14
+ ROOT = os.path.realpath(os.path.dirname(__file__))
15
+
16
+ FEATOMIC_PYTHON_SRC = os.path.realpath(os.path.join(ROOT, "..", "featomic"))
17
+ FEATOMIC_TORCH_SRC = os.path.realpath(os.path.join(ROOT, "..", "..", "featomic-torch"))
18
+
19
+
20
class universal_wheel(bdist_wheel):
    """Build wheels tagged as compatible with any Python 3 interpreter.

    The `wheel` package assumes that a project containing a binary extension
    links against `libpython.so`, which would tie the wheel to one specific
    Python version. That is not true here: the native library is built by
    cmake and loaded at runtime, so the wheel works with any Python >= 3.
    This is tracked in https://github.com/pypa/wheel/issues/185; until it is
    resolved we override the wheel tag by hand.
    """

    def get_tag(self):
        # keep the platform (os/arch) part of the computed tag, but replace
        # the interpreter/ABI part with the universal "py3"/"none" pair
        _, _, *platform_tag = bdist_wheel.get_tag(self)
        return ("py3", "none", *platform_tag)
31
+
32
+
33
class cmake_ext(build_ext):
    """Build the native library using cmake"""

    def run(self):
        # Configure, build and install the featomic-torch C++ library with
        # cmake, then record the featomic version used for the build.
        #
        # Deferred imports: these packages are build requirements that are
        # only guaranteed to be available once the build environment is set up
        import metatensor
        import metatensor.torch
        import torch

        import featomic

        source_dir = FEATOMIC_TORCH_SRC
        build_dir = os.path.join(ROOT, "build", "cmake-build")
        install_dir = os.path.join(os.path.realpath(self.build_lib), "featomic/torch")

        os.makedirs(build_dir, exist_ok=True)

        # Tell CMake where to find featomic & torch
        cmake_prefix_path = [
            featomic.utils.cmake_prefix_path,
            metatensor.utils.cmake_prefix_path,
            metatensor.torch.utils.cmake_prefix_path,
            torch.utils.cmake_prefix_path,
        ]

        # Install the shared library in a prefix matching the torch version used to
        # compile the code. This allows having multiple version of this shared library
        # inside the wheel; and dynamically pick the right one.
        torch_major, torch_minor, *_ = torch.__version__.split(".")
        cmake_install_prefix = os.path.join(
            install_dir, f"torch-{torch_major}.{torch_minor}"
        )

        cmake_options = [
            "-DCMAKE_BUILD_TYPE=Release",
            f"-DCMAKE_INSTALL_PREFIX={cmake_install_prefix}",
            f"-DCMAKE_PREFIX_PATH={';'.join(cmake_prefix_path)}",
        ]

        # ==================================================================== #
        # HACK: Torch cmake build system has a hard time finding CuDNN, so we
        # help it by pointing it to the right files

        # First try using the `nvidia.cudnn` package (dependency of torch on PyPI)
        try:
            import nvidia.cudnn

            cudnn_root = os.path.dirname(nvidia.cudnn.__file__)
        except ImportError:
            # Otherwise try to find CuDNN inside PyTorch itself
            cudnn_root = os.path.join(torch.utils.cmake_prefix_path, "..", "..")

        cudnn_version = os.path.join(cudnn_root, "include", "cudnn_version.h")
        if not os.path.exists(cudnn_version):
            # create a minimal cudnn_version.h (with a made-up version),
            # because it is not bundled together with the CuDNN shared
            # library in PyTorch conda distribution, see
            # https://github.com/pytorch/pytorch/issues/47743
            with open(cudnn_version, "w") as fd:
                fd.write("#define CUDNN_MAJOR 8\n")
                fd.write("#define CUDNN_MINOR 5\n")
                fd.write("#define CUDNN_PATCHLEVEL 0\n")

        cmake_options.append(f"-DCUDNN_INCLUDE_DIR={cudnn_root}/include")
        cmake_options.append(f"-DCUDNN_LIBRARY={cudnn_root}/lib")
        # do not warn if the two variables above aren't used
        cmake_options.append("--no-warn-unused-cli")

        # end of HACK
        # ==================================================================== #

        # configure step; `check=True` aborts the build on cmake failure
        subprocess.run(
            ["cmake", source_dir, *cmake_options],
            cwd=build_dir,
            check=True,
        )
        # build + install step (multi-config generators need `--config`)
        subprocess.run(
            [
                "cmake",
                "--build",
                build_dir,
                "--parallel",
                "--config",
                "Release",
                "--target",
                "install",
            ],
            check=True,
        )

        with open(os.path.join(install_dir, "_build_versions.py"), "w") as fd:
            fd.write("# Autogenerated file, do not edit\n\n\n")
            # Store the version of featomic used to build featomic_torch, to give a
            # nice error message to the user when trying to load the package
            # with an older featomic version installed
            fd.write(
                "# version of featomic used when compiling this package\n"
                f"BUILD_FEATOMIC_VERSION = '{featomic.__version__}'\n"
            )
131
+
132
+
133
class bdist_egg_disabled(bdist_egg):
    """Disabled version of bdist_egg

    Prevents setup.py install performing setuptools' default easy_install,
    which it should never ever do.
    """

    def run(self):
        # Abort with instructions for the user. The wheel glob must match the
        # wheel this project actually produces (featomic_torch-*.whl); the
        # previous message pointed at dist/metatensor-*.whl, which this
        # project never builds.
        sys.exit(
            "Aborting implicit building of eggs. "
            + "Use `pip install .` or `python setup.py bdist_wheel && pip "
            + "install dist/featomic_torch-*.whl` to install from source."
        )
146
+
147
+
148
class sdist_generate_data(sdist):
    """
    Create a sdist with an additional generated files:
    - `git_version_info`
    - `featomic-torch-cxx-*.tar.gz`
    """

    def run(self):
        # record the git state so builds from the sdist (where git is not
        # available) can still compute the full version number
        n_commits, git_hash = git_version_info()
        with open("git_version_info", "w") as fd:
            fd.write(f"{n_commits}\n{git_hash}\n")

        try:
            # package the C++ sources as a tarball included in the sdist
            generate_cxx_tar()

            # run original sdist
            super().run()
        finally:
            # remove the generated files even if the sdist build failed, so
            # they do not linger in the source tree (previously they leaked
            # on any exception raised above)
            os.unlink("git_version_info")
            for path in glob.glob("featomic-torch-cxx-*.tar.gz"):
                os.unlink(path)
168
+
169
+
170
def generate_cxx_tar():
    """
    Run the packaging script collecting the featomic-torch C++ sources into
    a `featomic-torch-cxx-*.tar.gz` tarball in the current directory.

    Raises ``RuntimeError`` if bash is not available or the script fails.
    """
    script = os.path.join(ROOT, "..", "..", "scripts", "package-featomic-torch.sh")
    assert os.path.exists(script)

    # probe for a working bash before trying to run the script, to give a
    # clearer error message than a raw FileNotFoundError
    try:
        subprocess.run(
            ["bash", "--version"],
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            encoding="utf8",
        )
    except Exception as e:
        raise RuntimeError("could not run `bash`, is it installed?") from e

    result = subprocess.run(
        ["bash", script, os.getcwd()],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        encoding="utf8",
    )

    if result.returncode != 0:
        raise RuntimeError(
            "failed to collect C++ sources for Python sdist\n"
            f"stdout:\n {result.stdout}\n\nstderr:\n {result.stderr}"
        )
197
+
198
+
199
def git_version_info():
    """
    If git is available and we are building from a checkout, get the number of commits
    since the last tag & full hash of the code. Otherwise, this always returns (0, "").
    """
    TAG_PREFIX = "featomic-torch-v"

    if os.path.exists("git_version_info"):
        # building from a sdist without git, but the git version was recorded
        # in the `git_version_info` file when the sdist was created
        with open("git_version_info") as fd:
            n_commits = int(fd.readline().strip())
            git_hash = fd.readline().strip()
        return n_commits, git_hash

    script = os.path.join(ROOT, "..", "..", "scripts", "git-version-info.py")
    assert os.path.exists(script)

    result = subprocess.run(
        [sys.executable, script, TAG_PREFIX],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        encoding="utf8",
    )

    if result.returncode != 0:
        raise Exception(
            "failed to get git version info.\n"
            f"stdout: {result.stdout}\n"
            f"stderr: {result.stderr}\n"
        )

    if result.stderr:
        # the script warned (e.g. not a git checkout): fall back to defaults
        print(result.stderr, file=sys.stderr)
        return 0, ""

    lines = result.stdout.splitlines()
    return int(lines[0].strip()), lines[1].strip()
239
+
240
+
241
def create_version_number(version):
    """
    Compute the full version number of the package from the base ``version``
    string (the content of the VERSION file).

    If there are commits since the last tag, this checkout is ahead of the
    last release, so the result is a dev pre-release of the next version,
    carrying the commit count and the git hash as local identifier
    (e.g. ``0.6.0`` + 12 commits -> ``0.7.0.dev12+<hash>``). Otherwise the
    base version is returned unchanged.
    """
    # `import packaging` at the top of the file does not guarantee that the
    # `packaging.version` submodule is loaded (it only works by accident when
    # something else — typically setuptools — imported it first), so import
    # it explicitly here
    import packaging.version

    version = packaging.version.parse(version)

    n_commits, git_hash = git_version_info()
    if n_commits != 0:
        # `n_commits` will be non zero only if we have commits since the last tag. This
        # mean we are in a pre-release of the next version. So we increase either the
        # minor version number or the release candidate number (if we are closing up on
        # a release)
        if version.pre is not None:
            # only "rc" pre-releases are expected in this project's tags
            assert version.pre[0] == "rc"
            pre = ("rc", version.pre[1] + 1)
            release = version.release
        else:
            major, minor, _patch = version.release
            release = (major, minor + 1, 0)
            pre = None

        # this is using a private API which is intended to become public soon:
        # https://github.com/pypa/packaging/pull/698. In the mean time we'll
        # use this
        version._version = version._version._replace(release=release)
        version._version = version._version._replace(pre=pre)
        version._version = version._version._replace(dev=("dev", n_commits))
        version._version = version._version._replace(local=(git_hash,))

    return str(version)
268
+
269
+
270
if __name__ == "__main__":
    if sys.platform == "win32":
        # On Windows, starting with PyTorch 2.3, the file shm.dll in torch has a
        # dependency on mkl DLLs. When building the code using pip build isolation, pip
        # installs the mkl package in a place where the os is not trying to load
        #
        # This is a very similar fix to https://github.com/pytorch/pytorch/pull/126095,
        # except only applying when importing torch from a build-isolation virtual
        # environment created by pip (`python -m build` does not seems to suffer from
        # this).
        import wheel

        # walk four directories up from the installed `wheel` package to reach
        # the root of pip's build-isolation virtual environment
        pip_virtualenv = os.path.realpath(
            os.path.join(
                os.path.dirname(wheel.__file__),
                "..",
                "..",
                "..",
                "..",
            )
        )
        mkl_dll_dir = os.path.join(
            pip_virtualenv,
            "normal",
            "Library",
            "bin",
        )

        if os.path.exists(mkl_dll_dir):
            os.add_dll_directory(mkl_dll_dir)

        # End of Windows/MKL/PIP hack

    if not os.path.exists(FEATOMIC_TORCH_SRC):
        # we are building from a sdist, which should include featomic-torch
        # sources as a tarball
        tarballs = glob.glob(os.path.join(ROOT, "featomic-torch-cxx-*.tar.gz"))

        if not len(tarballs) == 1:
            raise RuntimeError(
                "expected a single 'featomic-torch-cxx-*.tar.gz' file containing "
                "featomic-torch C++ sources"
            )

        FEATOMIC_TORCH_SRC = os.path.realpath(tarballs[0])
        # extract with cmake's built-in tar so this works without a system tar
        subprocess.run(
            ["cmake", "-E", "tar", "xf", FEATOMIC_TORCH_SRC],
            cwd=ROOT,
            check=True,
        )

        # strip the ".tar.gz" suffix to get the extracted directory path
        FEATOMIC_TORCH_SRC = ".".join(FEATOMIC_TORCH_SRC.split(".")[:-2])

    with open(os.path.join(FEATOMIC_TORCH_SRC, "VERSION")) as fd:
        version = create_version_number(fd.read().strip())

    with open(os.path.join(ROOT, "AUTHORS")) as fd:
        authors = fd.read().splitlines()

    if authors[0].startswith(".."):
        # handle "raw" symlink files (on Windows or from full repo tarball)
        with open(os.path.join(ROOT, authors[0])) as fd:
            authors = fd.read().splitlines()

    try:
        import torch

        # if we have torch, we are building a wheel, which will only be compatible with
        # a single torch version
        torch_v_major, torch_v_minor, *_ = torch.__version__.split(".")
        torch_version = f"== {torch_v_major}.{torch_v_minor}.*"
    except ImportError:
        # otherwise we are building a sdist
        torch_version = ">= 1.12"

    install_requires = [
        f"torch {torch_version}",
        "metatensor-torch >=0.6.0,<0.7.0",
    ]

    # when packaging a sdist for release, we should never use local dependencies
    FEATOMIC_NO_LOCAL_DEPS = os.environ.get("FEATOMIC_NO_LOCAL_DEPS", "0") == "1"
    if not FEATOMIC_NO_LOCAL_DEPS and os.path.exists(FEATOMIC_PYTHON_SRC):
        # we are building from a git checkout
        install_requires.append(f"featomic @ file://{FEATOMIC_PYTHON_SRC}")
    else:
        # we are building from a sdist/installing from a wheel
        install_requires.append("featomic >=0.6.0,<0.7.0")

    setup(
        version=version,
        author=", ".join(authors),
        install_requires=install_requires,
        # sourceless Extension: marks the wheel as containing binary code
        # (the actual native build is done by `cmake_ext` below)
        ext_modules=[
            Extension(name="featomic_torch", sources=[]),
        ],
        cmdclass={
            "build_ext": cmake_ext,
            # allow eggs only when explicitly requested on the command line
            "bdist_egg": bdist_egg if "bdist_egg" in sys.argv else bdist_egg_disabled,
            "bdist_wheel": universal_wheel,
            "sdist": sdist_generate_data,
        },
        # ship the cmake-installed artifacts (one tree per torch version)
        package_data={
            "featomic-torch": [
                "featomic/torch*/bin/*",
                "featomic/torch*/lib/*",
                "featomic/torch*/include/*",
            ]
        },
    )