HomOpt 0.1.0.tar.gz → 0.1.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
homopt-0.1.1/HomOpt/HomM.py CHANGED
@@ -4,7 +4,7 @@ from torch.optim import Optimizer
 class HomM(Optimizer):
     def __init__(self, params, lr=0.1, a=-0.5, k1=-1.0, k2=-1.0, eps=0.2):
         """
-        Finite-Time Momentum Optimizer
+        Homogeneous Momentum Optimizer
 
         Args:
             params (iterable): model parameters
homopt-0.1.1/HomOpt/__init__.py CHANGED
@@ -1,3 +1 @@
 from .HomM import HomM
-
-__all__ = ['HomM']
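Taken together, the two hunks above pin down the public API of this release: `HomM(params, lr=0.1, a=-0.5, k1=-1.0, k2=-1.0, eps=0.2)` is re-exported from the package root, and dropping `__all__` does not affect the explicit import path. Below is a minimal usage sketch, assuming `HomM` implements the standard `torch.optim.Optimizer` interface (`zero_grad()`/`step()`); the model, loss, and data are placeholders, not part of the package.

```python
# Minimal usage sketch (not from the package docs): assumes HomM follows the
# standard torch.optim.Optimizer interface with zero_grad()/step().
import torch
import torch.nn as nn
from HomOpt import HomM  # re-exported in HomOpt/__init__.py, as shown above

model = nn.Linear(10, 1)     # placeholder model
criterion = nn.MSELoss()     # placeholder loss
# Hyperparameters taken from the __init__ defaults in the HomM.py hunk above.
optimizer = HomM(model.parameters(), lr=0.1, a=-0.5, k1=-1.0, k2=-1.0, eps=0.2)

x, y = torch.randn(32, 10), torch.randn(32, 1)
for _ in range(100):
    optimizer.zero_grad()
    loss = criterion(model(x), y)
    loss.backward()
    optimizer.step()
```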
homopt-0.1.1/HomOpt.egg-info/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: HomOpt
-Version: 0.1.0
+Version: 0.1.1
 Summary: A collection of homogeneous optimizers for PyTorch
 Home-page: https://github.com/Yu-Zhou-1/HomOpt
 Author: Yu Zhou
@@ -19,6 +19,7 @@ Requires-Dist: torch>=1.6.0
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
+Dynamic: description
 Dynamic: description-content-type
 Dynamic: home-page
 Dynamic: keywords
@@ -27,3 +28,22 @@ Dynamic: license-file
 Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary
+
+# HomOpt
+
+HomOpt is a collection of homogeneous optimizers for PyTorch, designed to improve the performance of deep learning models. The optimizers are based on homogeneous dynamical systems and aim to provide more stable and efficient training.
+
+## Features
+
+- A set of homogeneous optimizers for PyTorch, including the `HomM` optimizer.
+- Optimizers designed to improve the training stability and convergence rates for deep learning tasks.
+- Easy-to-use and integrate into your PyTorch training workflows.
+
+## Installation
+
+You can install `HomOpt` using pip. First, ensure that you have Python 3.6 or later and PyTorch 1.6.0 or later installed.
+
+To install directly from PyPI:
+
+```bash
+pip install HomOpt
homopt-0.1.1/HomOpt.egg-info/SOURCES.txt CHANGED
@@ -1,4 +1,5 @@
 LICENSE
+MANIFEST.in
 README.md
 setup.py
 HomOpt/HomM.py
@@ -7,9 +8,4 @@ HomOpt.egg-info/PKG-INFO
 HomOpt.egg-info/SOURCES.txt
 HomOpt.egg-info/dependency_links.txt
 HomOpt.egg-info/requires.txt
-HomOpt.egg-info/top_level.txt
-homopt.egg-info/PKG-INFO
-homopt.egg-info/SOURCES.txt
-homopt.egg-info/dependency_links.txt
-homopt.egg-info/requires.txt
-homopt.egg-info/top_level.txt
+HomOpt.egg-info/top_level.txt
homopt-0.1.1/MANIFEST.in ADDED
@@ -0,0 +1,2 @@
+include README.md
+include LICENSE
homopt-0.1.1/PKG-INFO CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: HomOpt
-Version: 0.1.0
+Version: 0.1.1
 Summary: A collection of homogeneous optimizers for PyTorch
 Home-page: https://github.com/Yu-Zhou-1/HomOpt
 Author: Yu Zhou
@@ -19,6 +19,7 @@ Requires-Dist: torch>=1.6.0
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
+Dynamic: description
 Dynamic: description-content-type
 Dynamic: home-page
 Dynamic: keywords
@@ -27,3 +28,22 @@ Dynamic: license-file
 Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary
+
+# HomOpt
+
+HomOpt is a collection of homogeneous optimizers for PyTorch, designed to improve the performance of deep learning models. The optimizers are based on homogeneous dynamical systems and aim to provide more stable and efficient training.
+
+## Features
+
+- A set of homogeneous optimizers for PyTorch, including the `HomM` optimizer.
+- Optimizers designed to improve the training stability and convergence rates for deep learning tasks.
+- Easy-to-use and integrate into your PyTorch training workflows.
+
+## Installation
+
+You can install `HomOpt` using pip. First, ensure that you have Python 3.6 or later and PyTorch 1.6.0 or later installed.
+
+To install directly from PyPI:
+
+```bash
+pip install HomOpt
homopt-0.1.1/README.md ADDED
@@ -0,0 +1,18 @@
+# HomOpt
+
+HomOpt is a collection of homogeneous optimizers for PyTorch, designed to improve the performance of deep learning models. The optimizers are based on homogeneous dynamical systems and aim to provide more stable and efficient training.
+
+## Features
+
+- A set of homogeneous optimizers for PyTorch, including the `HomM` optimizer.
+- Optimizers designed to improve the training stability and convergence rates for deep learning tasks.
+- Easy-to-use and integrate into your PyTorch training workflows.
+
+## Installation
+
+You can install `HomOpt` using pip. First, ensure that you have Python 3.6 or later and PyTorch 1.6.0 or later installed.
+
+To install directly from PyPI:
+
+```bash
+pip install HomOpt
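As a quick sanity check of the installation described in the README above, the sketch below imports the optimizer and reads the installed version from the package metadata. It assumes Python 3.8+ for `importlib.metadata`; the package itself only requires 3.6, where the `importlib_metadata` backport would be needed instead.

```python
# Sanity-check sketch after `pip install HomOpt` (assumes Python 3.8+ for
# importlib.metadata; on 3.6/3.7 use the importlib_metadata backport).
from importlib.metadata import version

from HomOpt import HomM

print(version("HomOpt"))  # expected: 0.1.1 for this release
print(HomM)               # the optimizer class exported by the package
```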
homopt-0.1.1/setup.py CHANGED
@@ -1,18 +1,18 @@
 from setuptools import setup, find_packages
 import pathlib
 
-# 读取 README.md 内容作为长描述
+# Read the README.md file for the long description
 here = pathlib.Path(__file__).parent
 long_description = (here / "README.md").read_text(encoding="utf-8")
 
 setup(
     name="HomOpt",
-    version="0.1.0",
+    version="0.1.1",
     packages=find_packages(),
     install_requires=[
-        "torch>=1.6.0",  # 指定最低版本要求
+        "torch>=1.6.0",  # Specify minimum required version of torch
     ],
-    python_requires=">=3.6",  # 指定Python版本要求
+    python_requires=">=3.6",  # Specify Python version requirement
     author="Yu Zhou",
     author_email="yu_zhou@yeah.net",
     description="A collection of homogeneous optimizers for PyTorch",
@@ -26,8 +26,7 @@ setup(
         "Intended Audience :: Science/Research",
         "Topic :: Scientific/Engineering :: Artificial Intelligence",
     ],
-    license="MIT",
+    license="MIT",  # Specify the license type here
     keywords="pytorch optimizer deep-learning",
-    # 确保包含非Python文件(如果有)
-    include_package_data=True,
-)
+    include_package_data=True,  # Ensure non-Python files are included
+)
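The new MANIFEST.in explicitly declares README.md and LICENSE for the source distribution, while `include_package_data=True` covers any non-Python files living inside the package itself. A quick way to confirm what actually ships is to list the sdist's members, as in the sketch below; the archive filename is an assumption based on this release and may differ from the file PyPI serves.

```python
# Sketch: list the files shipped in the 0.1.1 sdist. The filename is an
# assumption; adjust it to the archive actually downloaded from PyPI.
import tarfile

with tarfile.open("HomOpt-0.1.1.tar.gz") as sdist:
    for name in sorted(sdist.getnames()):
        print(name)  # should include README.md, LICENSE, MANIFEST.in, HomOpt/...
```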
homopt-0.1.0/README.md DELETED