kernels 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
kernels/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ from kernels.utils import get_kernel, install_kernel, load_kernel, get_locked_kernel
2
+
3
+ __all__ = ["get_kernel", "get_locked_kernel", "load_kernel", "install_kernel"]
kernels/build.py ADDED
@@ -0,0 +1,144 @@
1
+ """
2
+ Python shims for the PEP 517 and PEP 660 build backend.
3
+
4
+ Major imports in this module are required to be lazy:
5
+ ```
6
+ $ hyperfine \
7
+ "/usr/bin/python3 -c \"print('hi')\"" \
8
+ "/usr/bin/python3 -c \"from subprocess import check_call; print('hi')\""
9
+ Base: Time (mean ± σ): 11.0 ms ± 1.7 ms [User: 8.5 ms, System: 2.5 ms]
10
+ With import: Time (mean ± σ): 15.2 ms ± 2.0 ms [User: 12.3 ms, System: 2.9 ms]
11
+ Base 1.38 ± 0.28 times faster than with import
12
+ ```
13
+
14
+ The same thing goes for the typing module, so we use Python 3.10 type annotations that
15
+ don't require importing typing but then quote them so earlier Python version ignore
16
+ them while IDEs and type checker can see through the quotes.
17
+ """
18
+
19
+ from kernels.compat import tomllib
20
+
21
+ TYPE_CHECKING = False
22
+ if TYPE_CHECKING:
23
+ from collections.abc import Mapping, Sequence # noqa:I001
24
+ from typing import Any # noqa:I001
25
+
26
+
27
def warn_config_settings(config_settings: "Mapping[Any, Any] | None" = None) -> None:
    """Emit a warning on stderr when PEP 517 config settings are passed."""
    import sys

    if not config_settings:
        return
    print("Warning: Config settings are not supported", file=sys.stderr)
32
+
33
+
34
def call(
    args: "Sequence[str]", config_settings: "Mapping[Any, Any] | None" = None
) -> str:
    """Invoke a uv subprocess and return the filename from stdout.

    Before delegating to uv, pre-install every kernel declared under
    `[tool.kernels.dependencies]` in the current project's pyproject.toml.
    Exits the process when the uv subprocess fails or produces no filename.
    """
    import os
    import shutil
    import subprocess
    import sys

    # Lazy import (see module docstring); hoisted out of the loop below,
    # where it was previously re-executed for every kernel.
    from kernels.utils import install_kernel

    warn_config_settings(config_settings)

    # Unlike `find_uv_bin`, this mechanism must work according to PEP 517:
    # the build backend runs with the project root as working directory.
    pyproject = os.path.join(os.getcwd(), "pyproject.toml")
    with open(pyproject, "rb") as f:
        data = tomllib.load(f)

    # Only the kernel names are needed; the version specifiers are not
    # consulted here (revision is pinned to `main`).
    kernel_deps = data.get("tool", {}).get("kernels", {}).get("dependencies", {})
    for kernel in kernel_deps:
        install_kernel(kernel, revision="main")

    uv_bin = shutil.which("uv")
    if uv_bin is None:
        raise RuntimeError("uv was not properly installed")

    # Forward stderr, capture stdout for the filename
    result = subprocess.run([uv_bin, *args], stdout=subprocess.PIPE)
    if result.returncode != 0:
        sys.exit(result.returncode)

    stdout = result.stdout.decode("utf-8").strip().splitlines(keepends=True)
    # Fail explicitly instead of an irrelevant stacktrace
    if not stdout:
        print("uv subprocess did not return a filename on stdout", file=sys.stderr)
        sys.exit(1)
    # If there was extra stdout, forward it (there should not be extra stdout)
    sys.stdout.writelines(stdout[:-1])
    return stdout[-1].strip()
72
+
73
+
74
def build_sdist(
    sdist_directory: str, config_settings: "Mapping[Any, Any] | None" = None
) -> str:
    """PEP 517 hook `build_sdist`."""
    return call(["build-backend", "build-sdist", sdist_directory], config_settings)
80
+
81
+
82
def build_wheel(
    wheel_directory: str,
    config_settings: "Mapping[Any, Any] | None" = None,
    metadata_directory: "str | None" = None,
) -> str:
    """PEP 517 hook `build_wheel`."""
    uv_args = ["build-backend", "build-wheel", wheel_directory]
    if metadata_directory:
        uv_args += ["--metadata-directory", metadata_directory]
    return call(uv_args, config_settings)
92
+
93
+
94
def get_requires_for_build_sdist(
    config_settings: "Mapping[Any, Any] | None" = None,
) -> "Sequence[str]":
    """PEP 517 hook `get_requires_for_build_sdist`."""
    warn_config_settings(config_settings)
    # No build requirements beyond the backend itself.
    no_requirements: "Sequence[str]" = []
    return no_requirements
100
+
101
+
102
def get_requires_for_build_wheel(
    config_settings: "Mapping[Any, Any] | None" = None,
) -> "Sequence[str]":
    """PEP 517 hook `get_requires_for_build_wheel`."""
    warn_config_settings(config_settings)
    # No build requirements beyond the backend itself.
    no_requirements: "Sequence[str]" = []
    return no_requirements
108
+
109
+
110
def prepare_metadata_for_build_wheel(
    metadata_directory: str, config_settings: "Mapping[Any, Any] | None" = None
) -> str:
    """PEP 517 hook `prepare_metadata_for_build_wheel`."""
    return call(
        ["build-backend", "prepare-metadata-for-build-wheel", metadata_directory],
        config_settings,
    )
116
+
117
+
118
def build_editable(
    wheel_directory: str,
    config_settings: "Mapping[Any, Any] | None" = None,
    metadata_directory: "str | None" = None,
) -> str:
    """PEP 660 hook `build_editable`."""
    uv_args = ["build-backend", "build-editable", wheel_directory]
    if metadata_directory:
        uv_args += ["--metadata-directory", metadata_directory]
    return call(uv_args, config_settings)
129
+
130
+
131
def get_requires_for_build_editable(
    config_settings: "Mapping[Any, Any] | None" = None,
) -> "Sequence[str]":
    """PEP 660 hook `get_requires_for_build_editable`."""
    warn_config_settings(config_settings)
    # No build requirements beyond the backend itself.
    no_requirements: "Sequence[str]" = []
    return no_requirements
137
+
138
+
139
def prepare_metadata_for_build_editable(
    metadata_directory: str, config_settings: "Mapping[Any, Any] | None" = None
) -> str:
    """PEP 660 hook `prepare_metadata_for_build_editable`."""
    return call(
        ["build-backend", "prepare-metadata-for-build-editable", metadata_directory],
        config_settings,
    )
kernels/cli.py ADDED
@@ -0,0 +1,98 @@
1
+ import argparse
2
+ import dataclasses
3
+ import json
4
+ import sys
5
+ from pathlib import Path
6
+
7
+ from kernels.compat import tomllib
8
+ from kernels.lockfile import KernelLock, get_kernel_locks
9
+ from kernels.utils import build_variant, install_kernel, install_kernel_all_variants
10
+
11
+
12
def main():
    """Entry point for the `kernels` command-line interface."""
    parser = argparse.ArgumentParser(
        prog="kernel", description="Manage compute kernels"
    )
    commands = parser.add_subparsers(required=True)

    download = commands.add_parser("download", help="Download locked kernels")
    download.add_argument(
        "project_dir",
        type=Path,
        help="The project directory",
    )
    download.add_argument(
        "--all-variants",
        action="store_true",
        help="Download all build variants of the kernel",
    )
    download.set_defaults(func=download_kernels)

    lock = commands.add_parser("lock", help="Lock kernel revisions")
    lock.add_argument(
        "project_dir",
        type=Path,
        help="The project directory",
    )
    lock.set_defaults(func=lock_kernels)

    # Dispatch to the handler selected via set_defaults.
    parsed = parser.parse_args()
    parsed.func(parsed)
41
+
42
+
43
def download_kernels(args):
    """Download every kernel pinned in `<project_dir>/kernels.lock`.

    Exits with status 1 when the lock file is missing or any kernel has
    no build for the current variant.
    """
    lock_path = args.project_dir / "kernels.lock"

    if not lock_path.exists():
        print(f"No kernels.lock file found in: {args.project_dir}", file=sys.stderr)
        sys.exit(1)

    # Reuse `lock_path` instead of rebuilding the same path a second time.
    with open(lock_path, "r") as f:
        lock_json = json.load(f)

    all_successful = True

    for kernel_lock_json in lock_json:
        kernel_lock = KernelLock.from_json(kernel_lock_json)
        # Message fixed: was the garbled "at with SHA".
        print(
            f"Downloading `{kernel_lock.repo_id}` with SHA: {kernel_lock.sha}",
            file=sys.stderr,
        )
        if args.all_variants:
            install_kernel_all_variants(
                kernel_lock.repo_id, kernel_lock.sha, variant_locks=kernel_lock.variants
            )
        else:
            try:
                install_kernel(
                    kernel_lock.repo_id,
                    kernel_lock.sha,
                    variant_lock=kernel_lock.variants[build_variant()],
                )
            except FileNotFoundError as e:
                # Best effort: report and keep downloading the rest.
                print(e, file=sys.stderr)
                all_successful = False

    if not all_successful:
        sys.exit(1)
78
+
79
+
80
+ def lock_kernels(args):
81
+ with open(args.project_dir / "pyproject.toml", "rb") as f:
82
+ data = tomllib.load(f)
83
+
84
+ kernel_versions = data.get("tool", {}).get("kernels", {}).get("dependencies", None)
85
+
86
+ all_locks = []
87
+ for kernel, version in kernel_versions.items():
88
+ all_locks.append(get_kernel_locks(kernel, version))
89
+
90
+ with open(args.project_dir / "kernels.lock", "w") as f:
91
+ json.dump(all_locks, f, cls=_JSONEncoder, indent=2)
92
+
93
+
94
+ class _JSONEncoder(json.JSONEncoder):
95
+ def default(self, o):
96
+ if dataclasses.is_dataclass(o):
97
+ return dataclasses.asdict(o)
98
+ return super().default(o)
kernels/compat.py ADDED
@@ -0,0 +1,8 @@
1
+ import sys
2
+
3
+ if sys.version_info >= (3, 11):
4
+ import tomllib
5
+ else:
6
+ import tomli as tomllib
7
+
8
+ __all__ = ["tomllib"]
kernels/lockfile.py ADDED
@@ -0,0 +1,134 @@
1
+ from dataclasses import dataclass
2
+ import hashlib
3
+ from pathlib import Path
4
+ from typing import Dict
5
+
6
+ from huggingface_hub import HfApi
7
+ from packaging.specifiers import SpecifierSet
8
+ from packaging.version import InvalidVersion, Version
9
+
10
+ from kernels.compat import tomllib
11
+
12
+
13
@dataclass
class VariantLock:
    # Aggregate hash of the build variant's files, e.g. "sha256-<hexdigest>".
    hash: str
    # Hashing scheme: per-file Git/Git-LFS hashes concatenated (with the
    # relative file names) and hashed together — see `get_kernel_locks`.
    hash_type: str = "git_lfs_concat"
17
+
18
+
19
@dataclass
class KernelLock:
    """Locked revision of a kernel repository and its build variants."""

    repo_id: str
    sha: str
    variants: Dict[str, VariantLock]

    @classmethod
    def from_json(cls, o: Dict):
        """Reconstruct a `KernelLock` from its JSON (dict) representation."""
        variant_locks = {}
        for name, lock in o["variants"].items():
            variant_locks[name] = VariantLock(**lock)
        return cls(repo_id=o["repo_id"], sha=o["sha"], variants=variant_locks)
31
+
32
+
33
def _get_available_versions(repo_id: str):
    """Get kernel versions that are available in the repository."""
    versions = {}
    for tag in HfApi().list_repo_refs(repo_id).tags:
        # Version tags look like `v<version>`; skip everything else.
        if not tag.name.startswith("v"):
            continue
        try:
            parsed = Version(tag.name[1:])
        except InvalidVersion:
            # Not a valid PEP 440 version; ignore the tag.
            continue
        versions[parsed] = tag
    return versions
45
+
46
+
47
def get_kernel_locks(repo_id: str, version_spec: str):
    """
    Get the locks for a kernel with the given version spec.

    The version specifier can be any valid Python version specifier:
    https://packaging.python.org/en/latest/specifications/version-specifiers/#version-specifiers

    Returns a `KernelLock` for the newest accepted version, containing one
    `VariantLock` per build variant found under `build/` in the repository.
    """
    versions = _get_available_versions(repo_id)
    requirement = SpecifierSet(version_spec)
    accepted_versions = sorted(requirement.filter(versions.keys()))

    if len(accepted_versions) == 0:
        raise ValueError(
            f"No version of `{repo_id}` satisfies requirement: {version_spec}"
        )

    tag_for_newest = versions[accepted_versions[-1]]

    r = HfApi().repo_info(
        repo_id=repo_id, revision=tag_for_newest.target_commit, files_metadata=True
    )
    if r.sha is None:
        raise ValueError(
            f"Cannot get commit SHA for repo {repo_id} for tag {tag_for_newest.name}"
        )

    if r.siblings is None:
        raise ValueError(
            f"Cannot get sibling information for {repo_id} for tag {tag_for_newest.name}"
        )

    # Collect (relative filename, per-file hash) pairs per build variant.
    variant_files = {}
    for sibling in r.siblings:
        if sibling.rfilename.startswith("build/torch"):
            if sibling.blob_id is None:
                raise ValueError(f"Cannot get blob ID for {sibling.rfilename}")

            path = Path(sibling.rfilename)
            variant = path.parts[1]
            filename = Path(*path.parts[2:])

            # LFS files carry a SHA-256; plain files use the Git (SHA-1)
            # blob ID. Renamed from `hash` to avoid shadowing the builtin.
            file_hash = (
                sibling.lfs.sha256 if sibling.lfs is not None else sibling.blob_id
            )

            files = variant_files.setdefault(variant, [])

            # Encode as posix for consistent slash handling, then encode
            # as utf-8 for byte-wise sorting later.
            files.append((filename.as_posix().encode("utf-8"), file_hash))

    # Fold the per-file hashes into one aggregate hash per variant.
    variant_locks = {}
    for variant, files in variant_files.items():
        m = hashlib.sha256()
        for filename, file_hash in sorted(files):
            # Filename as bytes.
            m.update(filename)
            # Git blob or LFS file hash as bytes.
            m.update(bytes.fromhex(file_hash))

        variant_locks[variant] = VariantLock(hash=f"sha256-{m.hexdigest()}")

    return KernelLock(repo_id=repo_id, sha=r.sha, variants=variant_locks)
108
+
109
+
110
+ def write_egg_lockfile(cmd, basename, filename):
111
+ import logging
112
+
113
+ cwd = Path.cwd()
114
+ pyproject_path = cwd / "pyproject.toml"
115
+ if not pyproject_path.exists():
116
+ # Nothing to do if the project doesn't have pyproject.toml.
117
+ return
118
+
119
+ with open(pyproject_path, "rb") as f:
120
+ data = tomllib.load(f)
121
+
122
+ kernel_versions = data.get("tool", {}).get("kernels", {}).get("dependencies", None)
123
+ if kernel_versions is None:
124
+ return
125
+
126
+ lock_path = cwd / "kernels.lock"
127
+ if not lock_path.exists():
128
+ logging.warning(f"Lock file {lock_path} does not exist")
129
+ # Ensure that the file gets deleted in editable installs.
130
+ data = None
131
+ else:
132
+ data = open(lock_path, "r").read()
133
+
134
+ cmd.write_or_delete_file(basename, filename, data)
kernels/utils.py ADDED
@@ -0,0 +1,264 @@
1
+ import ctypes
2
+ import hashlib
3
+ import importlib
4
+ import importlib.metadata
5
+ import inspect
6
+ import json
7
+ import os
8
+ from pathlib import Path
9
+ import platform
10
+ import sys
11
+ from importlib.metadata import Distribution
12
+ from types import ModuleType
13
+ from typing import Dict, List, Optional, Tuple
14
+
15
+ from huggingface_hub import hf_hub_download, snapshot_download
16
+ from packaging.version import parse
17
+
18
+ from kernels.compat import tomllib
19
+ from kernels.lockfile import KernelLock, VariantLock
20
+
21
+ CACHE_DIR: Optional[str] = os.environ.get("HF_KERNELS_CACHE", None)
22
+
23
+
24
def build_variant():
    """Return the build-variant name for the current environment.

    The variant encodes the Torch version, C++ ABI, CUDA version, CPU
    architecture and OS, e.g. `torch26-cxx11-cu124-x86_64-linux`.

    Raises `AssertionError` when Torch was built without CUDA.
    """
    import torch

    if torch.version.cuda is None:
        raise AssertionError(
            "This kernel requires CUDA to be installed. Torch was not compiled with CUDA enabled."
        )

    torch_version = parse(torch.__version__)
    cuda_version = parse(torch.version.cuda)
    cxxabi = "cxx11" if torch.compiled_with_cxx11_abi() else "cxx98"
    cpu = platform.machine()
    # Renamed from `os` to avoid shadowing the `os` module imported above.
    os_name = platform.system().lower()

    return f"torch{torch_version.major}{torch_version.minor}-{cxxabi}-cu{cuda_version.major}{cuda_version.minor}-{cpu}-{os_name}"
39
+
40
+
41
def import_from_path(module_name: str, file_path):
    """Import a Python module from an arbitrary filesystem path.

    The module is registered in `sys.modules` under a path-derived name so
    that several versions of the same module can coexist in one process.

    Raises `ImportError` when no import spec can be created for the path.
    """
    # We cannot use the module name as-is, after adding it to `sys.modules`,
    # it would also be used for other imports. So, we make a module name that
    # depends on the path for it to be unique using the hex-encoded hash of
    # the path.
    path_hash = "{:x}".format(ctypes.c_size_t(hash(file_path)).value)
    unique_name = f"{module_name}_{path_hash}"
    spec = importlib.util.spec_from_file_location(unique_name, file_path)
    if spec is None or spec.loader is None:
        # Fail clearly instead of an AttributeError on `None` below.
        raise ImportError(f"Cannot create import spec for: {file_path}")
    module = importlib.util.module_from_spec(spec)
    sys.modules[unique_name] = module
    spec.loader.exec_module(module)
    return module
53
+
54
+
55
def install_kernel(
    repo_id: str,
    revision: str,
    local_files_only: bool = False,
    variant_lock: Optional[VariantLock] = None,
) -> Tuple[str, str]:
    """
    Download a kernel for the current environment to the cache.

    The output path is validated against the lock's hash when
    `variant_lock` is set. Returns `(package_name, variant_path)`.
    """
    variant = build_variant()
    # Repository name, normalized to a valid Python package name.
    package_name = repo_id.split("/")[-1].replace("-", "_")

    repo_path = snapshot_download(
        repo_id,
        allow_patterns=f"build/{variant}/*",
        cache_dir=CACHE_DIR,
        revision=revision,
        local_files_only=local_files_only,
    )

    if variant_lock is not None:
        validate_kernel(repo_path=repo_path, variant=variant, hash=variant_lock.hash)

    variant_path = f"{repo_path}/build/{variant}"
    module_init_path = f"{variant_path}/{package_name}/__init__.py"
    if not os.path.exists(module_init_path):
        raise FileNotFoundError(
            f"Kernel `{repo_id}` at revision {revision} does not have build: {variant}"
        )

    return package_name, variant_path
89
+
90
+
91
def install_kernel_all_variants(
    repo_id: str,
    revision: str,
    local_files_only: bool = False,
    variant_locks: Optional[Dict[str, VariantLock]] = None,
) -> str:
    """Download every build variant of a kernel, optionally validating each
    one against its lock. Returns the local `build/` directory."""
    repo_path = Path(
        snapshot_download(
            repo_id,
            allow_patterns="build/*",
            cache_dir=CACHE_DIR,
            revision=revision,
            local_files_only=local_files_only,
        )
    )

    if variant_locks is not None:
        for variant_dir in (repo_path / "build").iterdir():
            name = variant_dir.parts[-1]

            lock = variant_locks.get(name)
            if lock is None:
                raise ValueError(f"No lock found for build variant: {name}")

            validate_kernel(repo_path=repo_path, variant=name, hash=lock.hash)

    return f"{repo_path}/build"
120
+
121
+
122
def get_metadata(repo_id: str, revision: str, local_files_only: bool = False):
    """Download and parse `build.toml` from a kernel repository."""
    metadata_path = hf_hub_download(
        repo_id,
        "build.toml",
        cache_dir=CACHE_DIR,
        revision=revision,
        local_files_only=local_files_only,
    )
    with open(metadata_path, "rb") as f:
        return tomllib.load(f)
134
+
135
+
136
def get_kernel(repo_id: str, revision: str = "main"):
    """Download a kernel (when needed) and import it as a module."""
    name, path = install_kernel(repo_id, revision=revision)
    return import_from_path(name, f"{path}/{name}/__init__.py")
139
+
140
+
141
def load_kernel(repo_id: str):
    """Get a pre-downloaded, locked kernel."""
    locked_sha = _get_caller_locked_kernel(repo_id)
    if locked_sha is None:
        raise ValueError(f"Kernel `{repo_id}` is not locked")

    # `local_files_only=True`: only consult the local cache, never the
    # network — the kernel must already be pre-downloaded.
    filename = hf_hub_download(
        repo_id,
        "build.toml",
        cache_dir=CACHE_DIR,
        local_files_only=True,
        revision=locked_sha,
    )
    with open(filename, "rb") as f:
        package_name = tomllib.load(f)["torch"]["name"]

    repo_path = os.path.dirname(filename)
    package_path = f"{repo_path}/build/{build_variant()}"
    return import_from_path(package_name, f"{package_path}/{package_name}/__init__.py")
162
+
163
+
164
def get_locked_kernel(repo_id: str, local_files_only: bool = False):
    """Get a kernel using a lock file."""
    sha = _get_caller_locked_kernel(repo_id)
    if sha is None:
        raise ValueError(f"Kernel `{repo_id}` is not locked")

    name, path = install_kernel(repo_id, sha, local_files_only=local_files_only)
    return import_from_path(name, f"{path}/{name}/__init__.py")
176
+
177
+
178
def _get_caller_locked_kernel(repo_id: str) -> Optional[str]:
    """Find the locked SHA for `repo_id` in the caller's `kernels.lock` metadata."""
    for dist in _get_caller_distributions():
        lock_json = dist.read_text("kernels.lock")
        if lock_json is None:
            continue
        for entry in json.loads(lock_json):
            lock = KernelLock.from_json(entry)
            if lock.repo_id == repo_id:
                return lock.sha
    return None
187
+
188
+
189
def _get_caller_distributions() -> List[Distribution]:
    """Resolve the installed distributions the calling module may belong to."""
    module = _get_caller_module()
    if module is None:
        return []

    # Look up all possible distributions that this module could be from.
    top_level = module.__name__.split(".", 1)[0]
    dist_names = importlib.metadata.packages_distributions().get(top_level)
    if dist_names is None:
        return []

    return [importlib.metadata.distribution(name) for name in dist_names]
201
+
202
+
203
+ def _get_caller_module() -> Optional[ModuleType]:
204
+ stack = inspect.stack()
205
+ # Get first module in the stack that is not the current module.
206
+ first_module = inspect.getmodule(stack[0][0])
207
+ for frame in stack[1:]:
208
+ module = inspect.getmodule(frame[0])
209
+ if module is not None and module != first_module:
210
+ return module
211
+ return first_module
212
+
213
+
214
def validate_kernel(*, repo_path: str, variant: str, hash: str):
    """Validate the given build variant of a kernel against a hash.

    Recomputes the variant's aggregate hash from the symlinked blob files
    on disk and raises `ValueError` when it does not match `hash`.
    """
    variant_path = Path(repo_path) / "build" / variant

    # Get the file paths. The first element is a byte-encoded relative path
    # used for sorting. The second element is the absolute path.
    files: List[Tuple[bytes, Path]] = []
    # Ideally we'd use Path.walk, but it's only available in Python 3.12.
    for dirpath, _, filenames in os.walk(variant_path):
        for name in filenames:
            file_abs = Path(dirpath) / name

            # Python likes to create files when importing modules from the
            # cache, only hash files that are symlinked blobs.
            if file_abs.is_symlink():
                files.append(
                    (
                        file_abs.relative_to(variant_path).as_posix().encode("utf-8"),
                        file_abs,
                    )
                )

    m = hashlib.sha256()

    for rel_name, full_path in sorted(files):
        m.update(rel_name)

        # The blob cache names files after their hash; the name length
        # tells us which scheme produced it.
        blob_filename = full_path.resolve().name
        if len(blob_filename) == 40:
            # SHA-1 hashed, so a Git blob.
            m.update(git_hash_object(full_path.read_bytes()))
        elif len(blob_filename) == 64:
            # SHA-256 hashed, so a Git LFS blob.
            m.update(hashlib.sha256(full_path.read_bytes()).digest())
        else:
            raise ValueError(f"Unexpected blob filename length: {len(blob_filename)}")

    # Renamed from camelCase `computedHash` for PEP 8 consistency.
    computed_hash = f"sha256-{m.hexdigest()}"
    if computed_hash != hash:
        raise ValueError(
            f"Lock file specifies kernel with hash {hash}, but downloaded kernel has hash: {computed_hash}"
        )
256
+
257
+
258
def git_hash_object(data: bytes, object_type: str = "blob"):
    """Calculate git SHA1 of data."""
    # Git hashes `<type> <size>\0<content>` as a single SHA-1 input.
    header = f"{object_type} {len(data)}\0".encode()
    return hashlib.sha1(header + data).digest()
@@ -0,0 +1,122 @@
1
+ Metadata-Version: 2.2
2
+ Name: kernels
3
+ Version: 0.1.7
4
+ Summary: Download cuda kernels
5
+ Author-email: OlivierDehaene <olivier@huggingface.co>, Daniel de Kok <daniel@huggingface.co>, David Holtz <david@huggingface.co>, Nicolas Patry <nicolas@huggingface.co>
6
+ Requires-Python: >=3.9
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: huggingface-hub>=0.26.3
9
+ Requires-Dist: packaging>=24.2
10
+ Requires-Dist: tomli>=2.0.1; python_version < "3.11"
11
+ Requires-Dist: torch>=2.4
12
+
13
+ # kernels
14
+
15
+ The Kernel Hub allows Python libraries and applications to load compute
16
+ kernels directly from the [Hub](https://hf.co/). To support this kind
17
+ of dynamic loading, Hub kernels differ from traditional Python kernel
18
+ packages in that they are made to be:
19
+
20
+ - Portable: a kernel can be loaded from paths outside `PYTHONPATH`.
21
+ - Unique: multiple versions of the same kernel can be loaded in the
22
+ same Python process.
23
+ - Compatible: kernels must support all recent versions of Python and
24
+ the different PyTorch build configurations (various CUDA versions
25
+ and C++ ABIs). Furthermore, older C library versions must be supported.
26
+
27
+ ## Usage
28
+
29
+ Kernels depends on `torch>=2.4` and CUDA for now.
30
+
31
+ Here is how you would use the [activation](https://huggingface.co/kernels-community/activation) kernels from the Hugging Face Hub:
32
+
33
+ ```python
34
+ import torch
35
+
36
+ from kernels import get_kernel
37
+
38
+ # Download optimized kernels from the Hugging Face hub
39
+ activation = get_kernel("kernels-community/activation")
40
+
41
+ # Random tensor
42
+ x = torch.randn((10, 10), dtype=torch.float16, device="cuda")
43
+
44
+ # Run the kernel
45
+ y = torch.empty_like(x)
46
+ activation.gelu_fast(y, x)
47
+
48
+ print(y)
49
+ ```
50
+
51
+ These kernels can be built from the [kernel-builder library](https://github.com/huggingface/kernel-builder).
52
+
53
+ If you're looking to better understand how these kernels are structured, or looking to build your own kernels,
54
+ please take a look at the following guide:
55
+ [writing kernels](https://github.com/huggingface/kernel-builder/blob/main/docs/writing-kernels.md).
56
+
57
+ ## Installation
58
+
59
+ To install `kernels`, we recommend installing from the pypi package:
60
+
61
+ ```bash
62
+ pip install kernels
63
+ ```
64
+
65
+ You should then be able to run the script above (also in [examples/basic.py](examples/basic.py)):
66
+ ```bash
67
+ python examples/basic.py
68
+ ```
69
+
70
+ ## Docker Reference
71
+
72
+ Build and run the reference [examples/basic.py](examples/basic.py) in a Docker container with the following commands:
73
+
74
+ ```bash
75
+ docker build --platform linux/amd64 -t kernels-reference -f docker/Dockerfile.reference .
76
+ docker run --gpus all -it --rm -e HF_TOKEN=$HF_TOKEN kernels-reference
77
+ ```
78
+
79
+ ## Locking kernel versions
80
+
81
+ Projects that use `setuptools` can lock the kernel versions that should be
82
+ used. First specify the accepted versions in `pyproject.toml` and make
83
+ sure that `kernels` is a build dependency:
84
+
85
+ ```toml
86
+ [build-system]
87
+ requires = ["kernels", "setuptools"]
88
+ build-backend = "setuptools.build_meta"
89
+
90
+ [tool.kernels.dependencies]
91
+ "kernels-community/activation" = ">=0.0.1"
92
+ ```
93
+
94
+ Then run `kernel lock .` in the project directory. This generates a `kernels.lock` file with
95
+ the locked revisions. The locked revision will be used when loading a kernel with
96
+ `get_locked_kernel`:
97
+
98
+ ```python
99
+ from kernels import get_locked_kernel
100
+
101
+ activation = get_locked_kernel("kernels-community/activation")
102
+ ```
103
+
104
+ **Note:** the lock file is included in the package metadata, so it will only be visible
105
+ to `kernels` after doing an (editable or regular) installation of your project.
106
+
107
+ ## Pre-downloading locked kernels
108
+
109
+ Locked kernels can be pre-downloaded by running `kernel download .` in your
110
+ project directory. This will download the kernels to your local Hugging Face
111
+ Hub cache.
112
+
113
+ The pre-downloaded kernels are used by the `get_locked_kernel` function.
114
+ `get_locked_kernel` will download a kernel when it is not pre-downloaded. If you
115
+ want kernel loading to error when a kernel is not pre-downloaded, you can use
116
+ the `load_kernel` function instead:
117
+
118
+ ```python
119
+ from kernels import load_kernel
120
+
121
+ activation = load_kernel("kernels-community/activation")
122
+ ```
@@ -0,0 +1,11 @@
1
+ kernels/__init__.py,sha256=P6Y2kXOCJWnSMajdiGBREmRx9T0rlSBD8Az4LXWmH_k,165
2
+ kernels/build.py,sha256=_T89LLg1hCP2ypa2b5lB7UGVCZUdLiAv1FhI3PCGyHQ,4867
3
+ kernels/cli.py,sha256=pm1ljPSvJjDV7ItfhjYCG3VaFwuWQaemYMyUqDI9bik,2943
4
+ kernels/compat.py,sha256=1DTrGg6048IV5bJoGkjqerTw9y-Sti2ePbiCbOwoEJU,120
5
+ kernels/lockfile.py,sha256=4BNyUOVFn0e6cV-7NERB0XdAjBbWoHiprIFcbyb6qEQ,4104
6
+ kernels/utils.py,sha256=NfY5Al5yXsrkMwdwEt8g0KWo5-oUj8wOyQUYiA__pTc,8663
7
+ kernels-0.1.7.dist-info/METADATA,sha256=KJc9KXs359NyEaDzv7mf6Km26I0Nj53zqoKGsW2_qpY,4010
8
+ kernels-0.1.7.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
9
+ kernels-0.1.7.dist-info/entry_points.txt,sha256=Q0zGqrVqbX7YwPRYkCUF60_3spqgMBI7hOrFN6xmfNg,116
10
+ kernels-0.1.7.dist-info/top_level.txt,sha256=uO_uHWo6VnVSrria-cIfRu733iZI5qs5mwirtAFigww,8
11
+ kernels-0.1.7.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (75.8.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,5 @@
1
+ [console_scripts]
2
+ kernels = kernels.cli:main
3
+
4
+ [egg_info.writers]
5
+ kernels.lock = kernels.lockfile:write_egg_lockfile
@@ -0,0 +1 @@
1
+ kernels