homa 0.0.19__tar.gz → 0.1.91__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. homa-0.1.91/PKG-INFO +75 -0
  2. homa-0.1.91/README.md +64 -0
  3. {homa-0.0.19 → homa-0.1.91}/pyproject.toml +1 -1
  4. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/StochasticActivation.py +15 -1
  5. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/Ensemble.py +2 -4
  6. homa-0.1.91/src/homa/ensemble/concerns/CalculatesMetricNecessities.py +24 -0
  7. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/PredictsProbabilities.py +4 -0
  8. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsEnsembleAccuracy.py +3 -2
  9. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsLogits.py +4 -0
  10. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsSize.py +2 -2
  11. homa-0.1.91/src/homa/ensemble/concerns/StoresModels.py +29 -0
  12. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/__init__.py +1 -2
  13. homa-0.1.91/src/homa/torch/__init__.py +1 -0
  14. homa-0.1.91/src/homa/vision/ClassificationModel.py +5 -0
  15. homa-0.1.91/src/homa/vision/Resnet.py +13 -0
  16. homa-0.1.91/src/homa/vision/concerns/HasLabels.py +13 -0
  17. homa-0.1.91/src/homa/vision/concerns/HasLogits.py +12 -0
  18. homa-0.1.91/src/homa/vision/concerns/HasProbabilities.py +9 -0
  19. homa-0.1.91/src/homa/vision/concerns/ReportsAccuracy.py +27 -0
  20. homa-0.1.91/src/homa/vision/concerns/ReportsMetrics.py +6 -0
  21. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/concerns/Trainable.py +5 -2
  22. homa-0.1.91/src/homa/vision/concerns/__init__.py +6 -0
  23. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/utils.py +4 -0
  24. homa-0.1.91/src/homa.egg-info/PKG-INFO +75 -0
  25. {homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/SOURCES.txt +8 -8
  26. homa-0.0.19/PKG-INFO +0 -21
  27. homa-0.0.19/README.md +0 -10
  28. homa-0.0.19/src/homa/ensemble/concerns/CalculatesMetricNecessities.py +0 -20
  29. homa-0.0.19/src/homa/ensemble/concerns/HasNetwork.py +0 -5
  30. homa-0.0.19/src/homa/ensemble/concerns/HasStateDicts.py +0 -8
  31. homa-0.0.19/src/homa/ensemble/concerns/RecordsStateDictionaries.py +0 -23
  32. homa-0.0.19/src/homa/torch/Module.py +0 -8
  33. homa-0.0.19/src/homa/torch/__init__.py +0 -2
  34. homa-0.0.19/src/homa/vision/Resnet.py +0 -12
  35. homa-0.0.19/src/homa/vision/concerns/__init__.py +0 -1
  36. homa-0.0.19/src/homa.egg-info/PKG-INFO +0 -21
  37. homa-0.0.19/tests/test_ensemble.py +0 -28
  38. homa-0.0.19/tests/test_resnet.py +0 -21
  39. homa-0.0.19/tests/test_stochastic_resnet.py +0 -20
  40. {homa-0.0.19 → homa-0.1.91}/setup.cfg +0 -0
  41. {homa-0.0.19 → homa-0.1.91}/src/homa/__init__.py +0 -0
  42. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/__init__.py +0 -0
  43. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/APLU.py +0 -0
  44. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/GALU.py +0 -0
  45. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/MELU.py +0 -0
  46. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/PDELU.py +0 -0
  47. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/SReLU.py +0 -0
  48. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/SmallGALU.py +0 -0
  49. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/WideMELU.py +0 -0
  50. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/__init__.py +0 -0
  51. {homa-0.0.19 → homa-0.1.91}/src/homa/activations/utils.py +0 -0
  52. {homa-0.0.19 → homa-0.1.91}/src/homa/cli/HomaCommand.py +0 -0
  53. {homa-0.0.19 → homa-0.1.91}/src/homa/cli/namespaces/CacheNamespace.py +0 -0
  54. {homa-0.0.19 → homa-0.1.91}/src/homa/cli/namespaces/MakeNamespace.py +0 -0
  55. {homa-0.0.19 → homa-0.1.91}/src/homa/cli/namespaces/__init__.py +0 -0
  56. {homa-0.0.19 → homa-0.1.91}/src/homa/device.py +0 -0
  57. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/__init__.py +0 -0
  58. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsClassificationMetrics.py +1 -1
  59. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsEnsembleF1.py +0 -0
  60. {homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsEnsembleKappa.py +0 -0
  61. {homa-0.0.19 → homa-0.1.91}/src/homa/settings.py +0 -0
  62. {homa-0.0.19 → homa-0.1.91}/src/homa/torch/helpers.py +0 -0
  63. {homa-0.0.19 → homa-0.1.91}/src/homa/utils.py +0 -0
  64. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/Model.py +0 -0
  65. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/StochasticResnet.py +0 -0
  66. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/__init__.py +0 -0
  67. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/modules/ResnetModule.py +0 -0
  68. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/modules/StochasticResnetModule.py +0 -0
  69. {homa-0.0.19 → homa-0.1.91}/src/homa/vision/modules/__init__.py +0 -0
  70. {homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/dependency_links.txt +0 -0
  71. {homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/entry_points.txt +0 -0
  72. {homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/requires.txt +0 -0
  73. {homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/top_level.txt +0 -0
homa-0.1.91/PKG-INFO ADDED
@@ -0,0 +1,75 @@
+ Metadata-Version: 2.4
+ Name: homa
+ Version: 0.1.91
+ Summary: A curated list of machine learning and deep learning helpers.
+ Author-email: Taha Shieenavaz <tahashieenavaz@gmail.com>
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ Requires-Dist: numpy
+ Requires-Dist: torch
+ Requires-Dist: fire
+
+ # Core
+
+ ### Device Management
+
+ ```py
+ from homa import cpu, mps, cuda, device
+
+ torch.tensor([1, 2, 3, 4, 5]).to(cpu())
+ torch.tensor([1, 2, 3, 4, 5]).to(cuda())
+ torch.tensor([1, 2, 3, 4, 5]).to(mps())
+ torch.tensor([1, 2, 3, 4, 5]).to(device())
+ ```
+
+ # Vision
+
+ ## Resnet
+
+ This is the standard ResNet50 module.
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
+
+ ## StochasticResnet
+
+ This is a ResNet module whose activation functions are replaced from a pool of different activation functions randomly. Read more on the [(paper)](https://www.mdpi.com/1424-8220/22/16/6129).
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
homa-0.1.91/README.md ADDED
@@ -0,0 +1,64 @@
+ # Core
+
+ ### Device Management
+
+ ```py
+ from homa import cpu, mps, cuda, device
+
+ torch.tensor([1, 2, 3, 4, 5]).to(cpu())
+ torch.tensor([1, 2, 3, 4, 5]).to(cuda())
+ torch.tensor([1, 2, 3, 4, 5]).to(mps())
+ torch.tensor([1, 2, 3, 4, 5]).to(device())
+ ```
+
+ # Vision
+
+ ## Resnet
+
+ This is the standard ResNet50 module.
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
+
+ ## StochasticResnet
+
+ This is a ResNet module whose activation functions are replaced from a pool of different activation functions randomly. Read more on the [(paper)](https://www.mdpi.com/1424-8220/22/16/6129).
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
{homa-0.0.19 → homa-0.1.91}/pyproject.toml CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "homa"
- version = "0.0.19"
+ version = "0.1.91"
  description = "A curated list of machine learning and deep learning helpers."
  authors = [
      { name="Taha Shieenavaz", email="tahashieenavaz@gmail.com" },
{homa-0.0.19 → homa-0.1.91}/src/homa/activations/classes/StochasticActivation.py CHANGED
@@ -13,7 +13,21 @@ from .SReLU import SReLU
  class StochasticActivation(torch.nn.Module):
      def __init__(self):
          super().__init__()
-         self.gate = random.choice([APLU, GALU, SmallGALU, MELU, WideMELU, PDELU, SReLU])
+         self.gate = random.choice(
+             [
+                 APLU,
+                 GALU,
+                 SmallGALU,
+                 MELU,
+                 WideMELU,
+                 PDELU,
+                 SReLU,
+                 torch.nn.ReLU,
+                 torch.nn.PReLU,
+                 torch.nn.LeakyReLU,
+                 torch.nn.ELU,
+             ]
+         )
          self.gate = self.gate()
 
      def forward(self, x: torch.Tensor) -> torch.Tensor:
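Note: the sampling pool above now also includes torch's built-in ReLU, PReLU, LeakyReLU, and ELU alongside the custom activations. A minimal sketch of how an instance behaves, assuming the module path from the file layout (whether `homa.activations` re-exports the class is not shown in this diff, and `forward` presumably delegates to the sampled gate):

```py
from homa.activations.classes.StochasticActivation import StochasticActivation  # module path assumed from the file layout

act = StochasticActivation()      # samples one gate class at construction and instantiates it
print(type(act.gate).__name__)    # e.g. "SReLU" or "LeakyReLU", depending on the draw
# out = act(x)                    # applied like any other activation module; valid input shapes depend on the chosen gate
```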
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/Ensemble.py CHANGED
@@ -1,8 +1,7 @@
  from .concerns import (
      ReportsSize,
-     RecordsStateDictionaries,
+     StoresModels,
      ReportsClassificationMetrics,
-     HasNetwork,
      PredictsProbabilities,
  )
 
@@ -10,9 +9,8 @@ from .concerns import (
  class Ensemble(
      ReportsSize,
      ReportsClassificationMetrics,
-     RecordsStateDictionaries,
      PredictsProbabilities,
-     HasNetwork,
+     StoresModels,
  ):
      def __init__(self):
          super().__init__()
homa-0.1.91/src/homa/ensemble/concerns/CalculatesMetricNecessities.py ADDED
@@ -0,0 +1,24 @@
+ import torch
+ from ...device import get_device
+
+
+ class CalculatesMetricNecessities:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     @torch.no_grad()
+     def metric_necessities(self, dataloader):
+         predictions, labels = [], []
+         device = get_device()
+         for x, y in dataloader:
+             x, y = x.to(device), y.to(device)
+             sum_logits = None
+             for model in self.models:
+                 model.to(device)
+                 model.eval()
+                 logits = model(x)
+                 sum_logits = logits if sum_logits is None else sum_logits + logits
+             predictions = sum_logits.argmax(dim=1)
+             predictions.extend(predictions.cpu().numpy())
+             labels.extend(y.cpu().numpy())
+         return predictions, labels
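Note: in the added file above, the batch loop rebinds the `predictions` list to the per-batch argmax tensor and then calls `.extend` on that tensor, so the method as shown would fail at runtime (the indentation of the final `return` is also ambiguous in this rendering). A hedged sketch of the presumably intended accumulation, written as a standalone helper with illustrative names:

```py
import torch


@torch.no_grad()
def ensemble_metric_necessities(models, dataloader, device):
    """Sum member logits per batch, then collect argmax predictions and true labels."""
    predictions, labels = [], []
    for x, y in dataloader:
        x, y = x.to(device), y.to(device)
        sum_logits = None
        for model in models:
            model.to(device)
            model.eval()
            logits = model(x)
            sum_logits = logits if sum_logits is None else sum_logits + logits
        batch_predictions = sum_logits.argmax(dim=1)  # keep the batch result in its own variable
        predictions.extend(batch_predictions.cpu().numpy())
        labels.extend(y.cpu().numpy())
    return predictions, labels
```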
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/PredictsProbabilities.py CHANGED
@@ -9,3 +9,7 @@ class PredictsProbabilities(ReportsLogits):
      def predict(self, x: torch.Tensor) -> torch.Tensor:
          logits = self.logits(x)
          return torch.nn.functional.softmax(logits, dim=1)
+
+     @torch.no_grad()
+     def predict_(self, x: torch.Tensor) -> torch.Tensor:
+         return self.predict(x)
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsEnsembleAccuracy.py CHANGED
@@ -1,10 +1,11 @@
  from sklearn.metrics import accuracy_score as accuracy
+ from torch.utils.data import DataLoader
 
 
  class ReportsEnsembleAccuracy:
      def __init__(self, *args, **kwargs):
          super().__init__(*args, **kwargs)
 
-     def accuracy(self) -> float:
-         predictions, labels = self.metric_necessities()
+     def accuracy(self, dataloader: DataLoader) -> float:
+         predictions, labels = self.metric_necessities(dataloader)
          return accuracy(labels, predictions)
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsLogits.py CHANGED
@@ -11,3 +11,7 @@ class ReportsLogits:
          for model in self.models:
              logits += model(x)
          return logits
+
+     @torch.no_grad()
+     def logits_(self, *args, **kwargs):
+         return self.logits(*args, **kwargs)
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsSize.py CHANGED
@@ -4,8 +4,8 @@ class ReportsSize:
 
      @property
      def size(self):
-         return len(self.state_dicts)
+         return len(self.models)
 
      @property
      def length(self):
-         return len(self.state_dicts)
+         return len(self.models)
homa-0.1.91/src/homa/ensemble/concerns/StoresModels.py ADDED
@@ -0,0 +1,29 @@
+ import torch
+ from copy import deepcopy
+ from typing import List
+ from ...vision import Model
+
+
+ class StoresModels:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.models: List[torch.nn.Module] = []
+
+     def record(self, model: Model | torch.nn.Module):
+         model_: torch.nn.Module | None = None
+         if isinstance(model, Model):
+             model_ = deepcopy(model.network)
+         elif isinstance(model, torch.nn.Module):
+             model_ = deepcopy(model)
+         else:
+             raise TypeError("Wrong input to ensemble record")
+         self.models.append(model_)
+
+     def push(self, *args, **kwargs):
+         self.record(*args, **kwargs)
+
+     def append(self, *args, **kwargs):
+         self.record(*args, **kwargs)
+
+     def add(self, *args, **kwargs):
+         self.record(*args, **kwargs)
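Note: with `StoresModels` replacing the old state-dict bookkeeping, an `Ensemble` now holds deep copies of whole member networks. A short usage sketch based only on the classes shown in this diff; the dataloaders and member count are placeholders:

```py
from homa.ensemble import Ensemble
from homa.vision import Resnet

ensemble = Ensemble()
for _ in range(3):
    member = Resnet(num_classes=10, lr=0.001)
    # member.train(train_dataloader)  # train each member before recording it
    ensemble.record(member)           # push(), append(), and add() are aliases for record()

print(ensemble.size)                  # number of stored member networks
# accuracy() now takes a DataLoader and aggregates member logits internally:
# print(ensemble.accuracy(test_dataloader))
```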
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/__init__.py CHANGED
@@ -1,10 +1,9 @@
  from .CalculatesMetricNecessities import CalculatesMetricNecessities
- from .HasNetwork import HasNetwork
  from .PredictsProbabilities import PredictsProbabilities
- from .RecordsStateDictionaries import RecordsStateDictionaries
  from .ReportsClassificationMetrics import ReportsClassificationMetrics
  from .ReportsEnsembleAccuracy import ReportsEnsembleAccuracy
  from .ReportsEnsembleF1 import ReportsEnsembleF1
  from .ReportsEnsembleKappa import ReportsEnsembleKappa
  from .ReportsLogits import ReportsLogits
  from .ReportsSize import ReportsSize
+ from .StoresModels import StoresModels
homa-0.1.91/src/homa/torch/__init__.py ADDED
@@ -0,0 +1 @@
+ from .helpers import *
homa-0.1.91/src/homa/vision/ClassificationModel.py ADDED
@@ -0,0 +1,5 @@
+ from .Model import Model
+
+
+ class ClassificationModel(Model):
+     pass
homa-0.1.91/src/homa/vision/Resnet.py ADDED
@@ -0,0 +1,13 @@
+ import torch
+ from .modules import ResnetModule
+ from .ClassificationModel import ClassificationModel
+ from .concerns import Trainable, ReportsMetrics
+ from ..device import get_device
+
+
+ class Resnet(ClassificationModel, Trainable, ReportsMetrics):
+     def __init__(self, num_classes: int, lr: float = 0.001):
+         super().__init__()
+         self.network = ResnetModule(num_classes).to(get_device())
+         self.criterion = torch.nn.CrossEntropyLoss()
+         self.optimizer = torch.optim.SGD(self.network.parameters(), lr=lr, momentum=0.9)
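Note: compared with the 0.0.19 constructor, the rebuilt `Resnet` moves its network to the detected device and gives `lr` a default of 0.001, so the learning-rate argument becomes optional. A two-line construction sketch:

```py
from homa.vision import Resnet

model = Resnet(num_classes=10)            # lr defaults to 0.001
model_fast = Resnet(num_classes=10, lr=0.01)
```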
homa-0.1.91/src/homa/vision/concerns/HasLabels.py ADDED
@@ -0,0 +1,13 @@
+ import torch
+
+
+ class HasLabels:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     def predict(self, x: torch.Tensor):
+         return torch.argmax(self.logits(x), dim=1)
+
+     @torch.no_grad()
+     def predict_(self, x: torch.Tensor):
+         return torch.argmax(self.logits(x), dim=1)
homa-0.1.91/src/homa/vision/concerns/HasLogits.py ADDED
@@ -0,0 +1,12 @@
+ import torch
+
+
+ class HasLogits:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     def logits(self, x: torch.Tensor) -> torch.Tensor:
+         return self.network(x)
+
+     def logits_(self, x: torch.Tensor) -> torch.Tensor:
+         return self.network(x)
homa-0.1.91/src/homa/vision/concerns/HasProbabilities.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+
+
+ class HasProbabilities:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     def probabilities(self, x: torch.Tensor):
+         return torch.softmax(self.logits(x), dim=1)
homa-0.1.91/src/homa/vision/concerns/ReportsAccuracy.py ADDED
@@ -0,0 +1,27 @@
+ from torch import Tensor, no_grad
+ from torch.utils.data.dataloader import DataLoader
+ from ...device import get_device
+
+
+ class ReportsAccuracy:
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     def accuracy_tensors(self, x: Tensor, y: Tensor) -> float:
+         predictions = self.predict_(x)
+         return (predictions == y).float().mean().item()
+
+     def accuracy_dataloader(self, dataloader: DataLoader):
+         correct, total = 0, 0
+         for x, y in dataloader:
+             x, y = x.to(get_device()), y.to(get_device())
+             predictions = self.predict_(x)
+             correct += (predictions == y).sum().item()
+             total += y.numel()
+         return correct / total if total > 0 else 0.0
+
+     def accuracy(self, x: Tensor | DataLoader, y: Tensor | None = None) -> float:
+         self.network.eval()
+         if isinstance(x, DataLoader):
+             return self.accuracy_dataloader(x)
+         return self.accuracy_tensors(x, y)
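Note: the `accuracy` entry point above dispatches on its first argument, so the same method covers a single batch of tensors or a whole `DataLoader`. A brief usage sketch; the data names are placeholders:

```py
from homa.vision import Resnet

model = Resnet(num_classes=10)

batch_accuracy = model.accuracy(x_batch, y_batch)  # Tensor path: mean of per-sample matches
test_accuracy = model.accuracy(test_dataloader)    # DataLoader path: batches are moved to the device internally
```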
homa-0.1.91/src/homa/vision/concerns/ReportsMetrics.py ADDED
@@ -0,0 +1,6 @@
+ from .ReportsAccuracy import ReportsAccuracy
+
+
+ class ReportsMetrics(ReportsAccuracy):
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
{homa-0.0.19 → homa-0.1.91}/src/homa/vision/concerns/Trainable.py CHANGED
@@ -1,9 +1,12 @@
  from torch import Tensor
  from torch.utils.data.dataloader import DataLoader
+ from .HasLogits import HasLogits
+ from .HasProbabilities import HasProbabilities
+ from .HasLabels import HasLabels
  from ...device import get_device
 
 
- class Trainable:
+ class Trainable(HasLogits, HasProbabilities, HasLabels):
      def __init__(self, *args, **kwargs):
          super().__init__(*args, **kwargs)
 
@@ -16,7 +19,7 @@ class Trainable:
      def train_tensors(self, x: Tensor, y: Tensor):
          self.network.train()
          self.optimizer.zero_grad()
-         loss = self.criterion(x, y)
+         loss = self.criterion(self.network(x).float(), y)
          loss.backward()
          self.optimizer.step()
 
{homa-0.0.19 → homa-0.1.91}/src/homa/vision/concerns/__init__.py CHANGED
@@ -0,0 +1,6 @@
+ from .HasLabels import HasLabels
+ from .HasLogits import HasLogits
+ from .HasProbabilities import HasProbabilities
+ from .ReportsAccuracy import ReportsAccuracy
+ from .ReportsMetrics import ReportsMetrics
+ from .Trainable import Trainable
{homa-0.0.19 → homa-0.1.91}/src/homa/vision/utils.py CHANGED
@@ -19,3 +19,7 @@ def replace_modules(
 
  def replace_relu(model: torch.nn.Module, replacement: torch.nn.Module):
      return replace_modules(model, torch.nn.ReLU, replacement)
+
+
+ def replace_gelu(model: torch.nn.Module, replacement: torch.nn.Module):
+     return replace_modules(model, torch.nn.GELU, replacement)
homa-0.1.91/src/homa.egg-info/PKG-INFO ADDED
@@ -0,0 +1,75 @@
+ Metadata-Version: 2.4
+ Name: homa
+ Version: 0.1.91
+ Summary: A curated list of machine learning and deep learning helpers.
+ Author-email: Taha Shieenavaz <tahashieenavaz@gmail.com>
+ Requires-Python: >=3.7
+ Description-Content-Type: text/markdown
+ Requires-Dist: numpy
+ Requires-Dist: torch
+ Requires-Dist: fire
+
+ # Core
+
+ ### Device Management
+
+ ```py
+ from homa import cpu, mps, cuda, device
+
+ torch.tensor([1, 2, 3, 4, 5]).to(cpu())
+ torch.tensor([1, 2, 3, 4, 5]).to(cuda())
+ torch.tensor([1, 2, 3, 4, 5]).to(mps())
+ torch.tensor([1, 2, 3, 4, 5]).to(device())
+ ```
+
+ # Vision
+
+ ## Resnet
+
+ This is the standard ResNet50 module.
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import Resnet
+
+ model = Resnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
+
+ ## StochasticResnet
+
+ This is a ResNet module whose activation functions are replaced from a pool of different activation functions randomly. Read more on the [(paper)](https://www.mdpi.com/1424-8220/22/16/6129).
+
+ You can train the model with a `DataLoader` object.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     model.train(train_dataloader)
+ ```
+
+ Similarly you can manually take care of decomposition of data from the `DataLoader`.
+
+ ```py
+ from homa.vision import StochasticResnet
+
+ model = StochasticResnet(num_classes=10, lr=0.001)
+ for epoch in range(10):
+     for x, y in train_dataloader:
+         model.train(x, y)
+ ```
{homa-0.0.19 → homa-0.1.91}/src/homa.egg-info/SOURCES.txt CHANGED
@@ -28,30 +28,30 @@ src/homa/cli/namespaces/__init__.py
  src/homa/ensemble/Ensemble.py
  src/homa/ensemble/__init__.py
  src/homa/ensemble/concerns/CalculatesMetricNecessities.py
- src/homa/ensemble/concerns/HasNetwork.py
- src/homa/ensemble/concerns/HasStateDicts.py
  src/homa/ensemble/concerns/PredictsProbabilities.py
- src/homa/ensemble/concerns/RecordsStateDictionaries.py
  src/homa/ensemble/concerns/ReportsClassificationMetrics.py
  src/homa/ensemble/concerns/ReportsEnsembleAccuracy.py
  src/homa/ensemble/concerns/ReportsEnsembleF1.py
  src/homa/ensemble/concerns/ReportsEnsembleKappa.py
  src/homa/ensemble/concerns/ReportsLogits.py
  src/homa/ensemble/concerns/ReportsSize.py
+ src/homa/ensemble/concerns/StoresModels.py
  src/homa/ensemble/concerns/__init__.py
- src/homa/torch/Module.py
  src/homa/torch/__init__.py
  src/homa/torch/helpers.py
+ src/homa/vision/ClassificationModel.py
  src/homa/vision/Model.py
  src/homa/vision/Resnet.py
  src/homa/vision/StochasticResnet.py
  src/homa/vision/__init__.py
  src/homa/vision/utils.py
+ src/homa/vision/concerns/HasLabels.py
+ src/homa/vision/concerns/HasLogits.py
+ src/homa/vision/concerns/HasProbabilities.py
+ src/homa/vision/concerns/ReportsAccuracy.py
+ src/homa/vision/concerns/ReportsMetrics.py
  src/homa/vision/concerns/Trainable.py
  src/homa/vision/concerns/__init__.py
  src/homa/vision/modules/ResnetModule.py
  src/homa/vision/modules/StochasticResnetModule.py
- src/homa/vision/modules/__init__.py
- tests/test_ensemble.py
- tests/test_resnet.py
- tests/test_stochastic_resnet.py
+ src/homa/vision/modules/__init__.py
homa-0.0.19/PKG-INFO DELETED
@@ -1,21 +0,0 @@
- Metadata-Version: 2.4
- Name: homa
- Version: 0.0.19
- Summary: A curated list of machine learning and deep learning helpers.
- Author-email: Taha Shieenavaz <tahashieenavaz@gmail.com>
- Requires-Python: >=3.7
- Description-Content-Type: text/markdown
- Requires-Dist: numpy
- Requires-Dist: torch
- Requires-Dist: fire
-
- ## Device Management
-
- ```py
- from homa import cpu, mps, cuda, device
-
- torch.tensor([1, 2, 3, 4, 5]).to(cpu())
- torch.tensor([1, 2, 3, 4, 5]).to(cuda())
- torch.tensor([1, 2, 3, 4, 5]).to(mps())
- torch.tensor([1, 2, 3, 4, 5]).to(device())
- ```
homa-0.0.19/README.md DELETED
@@ -1,10 +0,0 @@
- ## Device Management
-
- ```py
- from homa import cpu, mps, cuda, device
-
- torch.tensor([1, 2, 3, 4, 5]).to(cpu())
- torch.tensor([1, 2, 3, 4, 5]).to(cuda())
- torch.tensor([1, 2, 3, 4, 5]).to(mps())
- torch.tensor([1, 2, 3, 4, 5]).to(device())
- ```
homa-0.0.19/src/homa/ensemble/concerns/CalculatesMetricNecessities.py DELETED
@@ -1,20 +0,0 @@
- import torch
-
-
- class CalculatesMetricNecessities:
-     def __init__(self, *args, **kwargs):
-         super().__init__(*args, **kwargs)
-
-     def metric_necessities(self, dataloader):
-         all_predictions = []
-         all_labels = []
-         for x, y in dataloader:
-             batch_logits_list = []
-             for model in self.models:
-                 batch_logits_list.append(model(x))
-             all_batch_logits = torch.stack(batch_logits_list)
-             avg_logits = torch.mean(all_batch_logits, dim=0)
-             _, preds = torch.max(avg_logits, 1)
-             all_predictions.extend(preds.cpu().numpy())
-             all_labels.extend(y.cpu().numpy())
-         return all_predictions, all_labels
homa-0.0.19/src/homa/ensemble/concerns/HasNetwork.py DELETED
@@ -1,5 +0,0 @@
- class HasNetwork:
-     def __init__(self, *args, **kwargs):
-         super().__init__(*args, **kwargs)
-         # This property is going to be filled with the first model that is fed into the ensemble.
-         self.network = None
homa-0.0.19/src/homa/ensemble/concerns/HasStateDicts.py DELETED
@@ -1,8 +0,0 @@
- from typing import List
- from collections import OrderedDict
-
-
- class HasStateDicts:
-     def __init__(self, *args, **kwargs):
-         super().__init__(*args, **kwargs)
-         self.state_dicts: List[OrderedDict] = []
homa-0.0.19/src/homa/ensemble/concerns/RecordsStateDictionaries.py DELETED
@@ -1,23 +0,0 @@
- from copy import deepcopy
- from .HasStateDicts import HasStateDicts
- from ...vision import Model
-
-
- class RecordsStateDictionaries(HasStateDicts):
-     def __init__(self, *args, **kwargs):
-         super().__init__(*args, **kwargs)
-
-     def record(self, model: Model):
-         if self.network is None:
-             self.network = deepcopy(model.network)
-
-         self.state_dicts.append(model.network.state_dict())
-
-     def push(self, *args, **kwargs):
-         self.record(*args, **kwargs)
-
-     def append(self, *args, **kwargs):
-         self.record(*args, **kwargs)
-
-     def add(self, *args, **kwargs):
-         self.record(*args, **kwargs)
homa-0.0.19/src/homa/torch/Module.py DELETED
@@ -1,8 +0,0 @@
- import torch
- from ..device import get_device
-
-
- class Module(torch.nn.Module):
-     def __init__(self):
-         super(Module, self).__init__()
-         self.to(get_device())
homa-0.0.19/src/homa/torch/__init__.py DELETED
@@ -1,2 +0,0 @@
- from .Module import Module
- from .helpers import *
homa-0.0.19/src/homa/vision/Resnet.py DELETED
@@ -1,12 +0,0 @@
- import torch
- from .modules import ResnetModule
- from .Model import Model
- from .concerns import Trainable
-
-
- class Resnet(Model, Trainable):
-     def __init__(self, num_classes: int, lr: float):
-         super().__init__()
-         self.network = ResnetModule(num_classes)
-         self.criterion = torch.nn.CrossEntropyLoss()
-         self.optimizer = torch.optim.SGD(self.network.parameters(), lr=lr, momentum=0.9)
homa-0.0.19/src/homa/vision/concerns/__init__.py DELETED
@@ -1 +0,0 @@
- from .Trainable import Trainable
homa-0.0.19/src/homa.egg-info/PKG-INFO DELETED
@@ -1,21 +0,0 @@
- Metadata-Version: 2.4
- Name: homa
- Version: 0.0.19
- Summary: A curated list of machine learning and deep learning helpers.
- Author-email: Taha Shieenavaz <tahashieenavaz@gmail.com>
- Requires-Python: >=3.7
- Description-Content-Type: text/markdown
- Requires-Dist: numpy
- Requires-Dist: torch
- Requires-Dist: fire
-
- ## Device Management
-
- ```py
- from homa import cpu, mps, cuda, device
-
- torch.tensor([1, 2, 3, 4, 5]).to(cpu())
- torch.tensor([1, 2, 3, 4, 5]).to(cuda())
- torch.tensor([1, 2, 3, 4, 5]).to(mps())
- torch.tensor([1, 2, 3, 4, 5]).to(device())
- ```
homa-0.0.19/tests/test_ensemble.py DELETED
@@ -1,28 +0,0 @@
- import pytest
- from homa.ensemble import Ensemble
- from homa.vision import Resnet
-
-
- @pytest.fixture
- def ensemble():
-     return Ensemble()
-
-
- @pytest.fixture
- def resnet():
-     return Resnet(lr=0.001, num_classes=10)
-
-
- def test_ensemble_initialization(ensemble):
-     assert isinstance(ensemble, Ensemble)
-
-
- def test_ensemble_records_models(ensemble, resnet):
-     assert ensemble.network is None
-     ensemble.record(resnet)
-     assert ensemble.network is not None
-     ensemble.append(resnet)
-     ensemble.push(resnet)
-     ensemble.add(resnet)
-     assert ensemble.size == 4
-     assert ensemble.length == 4
homa-0.0.19/tests/test_resnet.py DELETED
@@ -1,21 +0,0 @@
- import pytest
- import torch
- from homa.vision import Resnet, Model
- from homa.vision.modules import ResnetModule
-
-
- @pytest.fixture
- def resnet_model():
-     model = Resnet(num_classes=5, lr=0.001)
-     return model
-
-
- def test_resnet_initialization(resnet_model):
-     assert isinstance(resnet_model, Resnet)
-     assert isinstance(resnet_model, Model)
-     assert hasattr(resnet_model, "network")
-     assert hasattr(resnet_model, "optimizer")
-     assert hasattr(resnet_model, "criterion")
-     assert isinstance(resnet_model.network, ResnetModule)
-     assert isinstance(resnet_model.optimizer, torch.optim.SGD)
-     assert isinstance(resnet_model.criterion, torch.nn.CrossEntropyLoss)
homa-0.0.19/tests/test_stochastic_resnet.py DELETED
@@ -1,20 +0,0 @@
- import pytest
- import torch
- from homa.vision import StochasticResnet, Model
- from homa.vision.modules import ResnetModule
-
-
- @pytest.fixture
- def stochastic_resnet_model():
-     return StochasticResnet(num_classes=5, lr=0.001)
-
-
- def test_resnet_initialization(stochastic_resnet_model):
-     assert isinstance(stochastic_resnet_model, StochasticResnet)
-     assert isinstance(stochastic_resnet_model, Model)
-     assert hasattr(stochastic_resnet_model, "network")
-     assert hasattr(stochastic_resnet_model, "optimizer")
-     assert hasattr(stochastic_resnet_model, "criterion")
-     assert isinstance(stochastic_resnet_model.network, ResnetModule)
-     assert isinstance(stochastic_resnet_model.optimizer, torch.optim.SGD)
-     assert isinstance(stochastic_resnet_model.criterion, torch.nn.CrossEntropyLoss)
{homa-0.0.19 → homa-0.1.91}/src/homa/ensemble/concerns/ReportsClassificationMetrics.py CHANGED
@@ -6,8 +6,8 @@ from .CalculatesMetricNecessities import CalculatesMetricNecessities
 
  class ReportsClassificationMetrics(
      CalculatesMetricNecessities,
-     ReportsEnsembleF1,
      ReportsEnsembleAccuracy,
+     ReportsEnsembleF1,
      ReportsEnsembleKappa,
  ):
      pass