homa 3.tar.gz → 3.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
homa-3.2/PKG-INFO ADDED
@@ -0,0 +1,60 @@
+ Metadata-Version: 2.1
+ Name: homa
+ Version: 3.2
+ Maintainer: Taha Shieenavaz
+ Maintainer-email: tahashieenavaz@gmail.com
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: torchvision
+ Requires-Dist: torch
+
+ # Homa
+
+ - [Homa](#homa)
+   - [Helpers](#helpers)
+     - [Device](#device)
+     - [Pickle](#pickle)
+   - [Transformers](#transformers)
+     - [Positional Encoding](#positional-encoding)
+
+ <div align="center">
+     <img src="https://github.com/tahashieenavaz/homa/raw/main/art/homa.svg" width="500" />
+ </div>
+
+ ## Helpers
+
+ ### Device
+
+ ```py
+ from homa import get_device
+
+ torch.tensor(torch.randint(-20, 10, (32, 10))).to(get_device())
+ ```
+
+ ### Pickle
+
+ ```py
+ from homa import pickle
+
+ person = {"name": "John Doe", age: 88}
+ pickle(person, "person.pkl")
+
+ loaded_person = pickle("person.pkl")
+ ```
+
+ ## Transformers
+
+ ### Positional Encoding
+
+ ```py
+ from homa import positional_encoding
+
+ class TransformerModel(torch.nn.Module):
+     def __init__(self, seq_length, dimension):
+         # ...
+         self.positional_encoding = positional_encoding(seq_length, dimension)
+
+     def forward(self, x):
+         # ...
+         x = x + self.positional_encoding
+ ```
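Two bugs in the README snippets above are worth flagging, since they ship verbatim in 3.2's PKG-INFO and README.md: the Device example uses `torch` without importing it and wraps `torch.randint` (which already returns a tensor) in a redundant `torch.tensor(...)` call, and the Pickle example's bare `age` key would raise `NameError` at runtime. A corrected sketch of the same usage, assuming the documented `get_device` and `pickle` helpers:

```py
import torch
from homa import get_device, pickle

# torch.randint already returns a tensor; no torch.tensor() wrapper needed
batch = torch.randint(-20, 10, (32, 10)).to(get_device())

# the key must be the string "age"; a bare `age` name raises NameError
person = {"name": "John Doe", "age": 88}
pickle(person, "person.pkl")          # write
loaded_person = pickle("person.pkl")  # read back
```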
homa-3.2/README.md ADDED
@@ -0,0 +1,50 @@
+ # Homa
+
+ - [Homa](#homa)
+   - [Helpers](#helpers)
+     - [Device](#device)
+     - [Pickle](#pickle)
+   - [Transformers](#transformers)
+     - [Positional Encoding](#positional-encoding)
+
+ <div align="center">
+     <img src="https://github.com/tahashieenavaz/homa/raw/main/art/homa.svg" width="500" />
+ </div>
+
+ ## Helpers
+
+ ### Device
+
+ ```py
+ from homa import get_device
+
+ torch.tensor(torch.randint(-20, 10, (32, 10))).to(get_device())
+ ```
+
+ ### Pickle
+
+ ```py
+ from homa import pickle
+
+ person = {"name": "John Doe", age: 88}
+ pickle(person, "person.pkl")
+
+ loaded_person = pickle("person.pkl")
+ ```
+
+ ## Transformers
+
+ ### Positional Encoding
+
+ ```py
+ from homa import positional_encoding
+
+ class TransformerModel(torch.nn.Module):
+     def __init__(self, seq_length, dimension):
+         # ...
+         self.positional_encoding = positional_encoding(seq_length, dimension)
+
+     def forward(self, x):
+         # ...
+         x = x + self.positional_encoding
+ ```
homa-3.2/homa/__init__.py ADDED
@@ -0,0 +1,5 @@
+ from .helpers import get_device
+ from .helpers import pickle
+ from .helpers import flush
+
+ from .transformers import positional_encoding
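With this `__init__.py`, every documented helper is re-exported at the package root. A minimal sketch of the resulting import surface, assuming homa 3.2 is installed:

```py
from homa import flush, get_device, pickle, positional_encoding

# flush(...) is print(...) with flush=True forced on
flush("running on", get_device())
```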
homa-3/homa/helpers.py → homa-3.2/homa/helpers.py
@@ -1,22 +1,34 @@
- import pickle
+ import pickle as pickle_object
+ import torch


- def pickle(filename_or_variable: str | object, target_file: str | None = None):
+ def flush(*args, **kwargs):
+     kwargs["flush"] = True
+     print(*args, **kwargs)
+
+
+ def get_device():
+     return torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+
+ def pickle(
+     filename_or_variable: str | object, target_file: str | None = None
+ ) -> object | None:
      if not target_file and isinstance(filename_or_variable, str):
          return read_from_pickle(filename_or_variable)

      if target_file and isinstance(filename_or_variable, object):
          write_to_pickle(filename_or_variable, target_file)
+         return

      raise Exception("Wrong pickle helper inputs")


  def write_to_pickle(data, filename):
      with open(filename, "wb") as f:
-         pickle.dump(data, f)
+         pickle_object.dump(data, f)


  def read_from_pickle(filename):
      with open(filename, "rb") as f:
-         data = pickle.load(f)
-         return data
+         return pickle_object.load(f)
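The reworked `pickle` helper dispatches on its arguments: a lone string is treated as a path to read, while a value plus a `target_file` writes. Note that `isinstance(filename_or_variable, object)` is always `True` in Python, so the write branch is effectively selected by `target_file` alone, and anything else falls through to the `Exception`. A round-trip sketch under those rules:

```py
from homa import pickle

config = {"lr": 1e-3, "epochs": 10}  # hypothetical payload
pickle(config, "config.pkl")         # value + path → write_to_pickle
restored = pickle("config.pkl")      # lone string → read_from_pickle
assert restored == config
```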
homa-3.2/homa/transformers.py ADDED
@@ -0,0 +1,19 @@
+ import torch
+ import math
+ from .helpers import get_device
+
+
+ def positional_encoding(sequence_length: int, model_dimension: int) -> torch.Tensor:
+     positional_embeddings = torch.zeros(
+         sequence_length, model_dimension, device=get_device()
+     )
+     for pos in range(sequence_length):
+         for i in range(0, model_dimension, 2):
+             positional_embeddings[pos, i] = math.sin(
+                 pos / (10000 ** (i / model_dimension))
+             )
+             if i + 1 < model_dimension:
+                 positional_embeddings[pos, i + 1] = math.cos(
+                     pos / (10000 ** (i / model_dimension))
+                 )
+     return positional_embeddings
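This is the fixed sinusoidal table from "Attention Is All You Need": for each even column i, PE(pos, i) = sin(pos / 10000^(i/d)) and PE(pos, i+1) = cos of the same angle, built here with Python-level loops on the device returned by `get_device()`. For larger tables the same values can be computed in one shot; a vectorized sketch (a hypothetical alternative, not part of the package):

```py
import torch

def positional_encoding_vectorized(sequence_length: int, model_dimension: int) -> torch.Tensor:
    position = torch.arange(sequence_length, dtype=torch.float32).unsqueeze(1)  # (n, 1)
    i = torch.arange(0, model_dimension, 2, dtype=torch.float32)                # even columns
    angles = position / (10000.0 ** (i / model_dimension))                      # (n, ceil(d/2))
    pe = torch.zeros(sequence_length, model_dimension)
    pe[:, 0::2] = torch.sin(angles)
    pe[:, 1::2] = torch.cos(angles[:, : model_dimension // 2])  # guard odd dimensions
    return pe
```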
homa-3.2/homa.egg-info/PKG-INFO ADDED
@@ -0,0 +1,60 @@
(60 lines, identical to homa-3.2/PKG-INFO above)
homa-3/homa.egg-info/SOURCES.txt → homa-3.2/homa.egg-info/SOURCES.txt
@@ -4,8 +4,10 @@ setup.py
  homa/__init__.py
  homa/datasets.py
  homa/helpers.py
+ homa/transformers.py
  homa.egg-info/PKG-INFO
  homa.egg-info/SOURCES.txt
  homa.egg-info/dependency_links.txt
  homa.egg-info/requires.txt
- homa.egg-info/top_level.txt
+ homa.egg-info/top_level.txt
+ test/test.py
homa-3/setup.py → homa-3.2/setup.py
@@ -8,7 +8,7 @@ setup(
      name="homa",
      maintainer="Taha Shieenavaz",
      maintainer_email="tahashieenavaz@gmail.com",
-     version=3,
+     version=3.02,
      packages=find_packages(),
      install_requires=["torchvision", "torch"],
      long_description=description,
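One subtlety in this change: `version=3.02` passes a float, not a string. setuptools stringifies it to "3.02", and PEP 440 normalization strips the leading zero from the release segment, which is why the package publishes as 3.2 rather than 3.02. This can be checked with the `packaging` library (assuming it is installed):

```py
from packaging.version import Version

# PEP 440 drops leading zeros in release segments: "3.02" → "3.2"
assert str(Version("3.02")) == "3.2"
assert Version("3.02") == Version("3.2")
```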
homa-3.2/test/test.py ADDED
@@ -0,0 +1,4 @@
+ from homa import pickle
+
+ a = {"name": "Taha", "age": 30}
+ pickle(a, "name")
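The new test only exercises the write path, pickling a dict to a file literally named "name" and asserting nothing. A round trip would also cover `read_from_pickle`; a hypothetical extension:

```py
from homa import pickle

a = {"name": "Taha", "age": 30}
pickle(a, "person.pkl")           # write path
assert pickle("person.pkl") == a  # read path, checked
```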
homa-3/PKG-INFO DELETED
@@ -1,15 +0,0 @@
- Metadata-Version: 2.1
- Name: homa
- Version: 3
- Maintainer: Taha Shieenavaz
- Maintainer-email: tahashieenavaz@gmail.com
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: torchvision
- Requires-Dist: torch
-
- # Homa
-
- <div align="center">
-     <img src="https://github.com/tahashieenavaz/homa/raw/main/art/homa.svg" width="500" />
- </div>
homa-3/README.md DELETED
@@ -1,5 +0,0 @@
- # Homa
-
- <div align="center">
-     <img src="https://github.com/tahashieenavaz/homa/raw/main/art/homa.svg" width="500" />
- </div>
homa-3/homa/__init__.py DELETED
@@ -1,3 +0,0 @@
- from .datasets import ImageDataset
- from .datasets import AugmentedDataset
- from .helpers import pickle
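Note the breaking change here: 3.2's `__init__.py` no longer re-exports `ImageDataset` or `AugmentedDataset`, even though `homa/datasets.py` is still listed in SOURCES.txt. Callers relying on the old root imports would need the submodule instead (assuming the classes still live there):

```py
# homa 3:
#   from homa import ImageDataset, AugmentedDataset
# homa 3.2, assuming homa/datasets.py still defines these classes:
from homa.datasets import ImageDataset, AugmentedDataset
```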
homa-3/homa.egg-info/PKG-INFO DELETED
@@ -1,15 +0,0 @@
(15 lines, identical to homa-3/PKG-INFO above)
6 files without changes