torch-einops-utils 0.0.20__tar.gz → 0.0.21__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/PKG-INFO +1 -1
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/pyproject.toml +1 -1
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/torch_einops_utils.py +5 -1
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.github/workflows/python-publish.yml +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.github/workflows/test.yml +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.gitignore +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/LICENSE +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/README.md +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/tests/test_save_load.py +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/tests/test_utils.py +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/__init__.py +0 -0
- {torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/save_load.py +0 -0
{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: torch-einops-utils
-Version: 0.0.20
+Version: 0.0.21
 Summary: Personal utility functions
 Project-URL: Homepage, https://pypi.org/project/torch-einops-utils/
 Project-URL: Repository, https://github.com/lucidrains/torch-einops-utils

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/torch_einops_utils.py
RENAMED

@@ -244,7 +244,8 @@ def pad_sequence(
     value = 0.,
     left = False,
     dim_stack = 0,
-    return_lens = False
+    return_lens = False,
+    pad_lens = False # returns padding length instead of sequence lengths
 ):
     if len(tensors) == 0:
         return None
@@ -262,6 +263,9 @@ def pad_sequence(
     if not return_lens:
         return stacked
 
+    if pad_lens:
+        lens = max_len - lens
+
     return stacked, lens
 
 # tree flatten with inverse
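
A minimal usage sketch of the new `pad_lens` flag, assuming `pad_sequence` is re-exported at the package root and takes a list of variable-length tensors as its first argument; only the lines shown in the hunks above are confirmed by this diff, everything else is an illustrative assumption:

```python
import torch
from torch_einops_utils import pad_sequence  # assumed re-export from __init__.py

# three variable-length 1-D tensors
tensors = [torch.randn(3), torch.randn(5), torch.randn(2)]

# existing behavior: `lens` holds the original sequence lengths (3, 5, 2)
padded, lens = pad_sequence(tensors, return_lens = True)

# new in 0.0.21: with pad_lens = True, the second return value holds the
# amount of padding added per sequence (max_len - length, i.e. 2, 0, 3)
padded, pad_amounts = pad_sequence(tensors, return_lens = True, pad_lens = True)
```

Note that `pad_lens` only has an effect when `return_lens = True`, since the function returns early with just the stacked tensor otherwise.
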
{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.github/workflows/python-publish.yml
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.github/workflows/test.yml
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/.gitignore
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/LICENSE
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/README.md
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/tests/test_save_load.py
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/tests/test_utils.py
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/__init__.py
RENAMED
File without changes

{torch_einops_utils-0.0.20 → torch_einops_utils-0.0.21}/torch_einops_utils/save_load.py
RENAMED
File without changes