joonmyung 1.5.9.tar.gz → 1.5.11.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. {joonmyung-1.5.9 → joonmyung-1.5.11}/PKG-INFO +1 -1
  2. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/analysis.py +22 -26
  3. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/script.py +2 -2
  4. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/PKG-INFO +1 -1
  5. {joonmyung-1.5.9 → joonmyung-1.5.11}/setup.py +1 -1
  6. {joonmyung-1.5.9 → joonmyung-1.5.11}/LICENSE.txt +0 -0
  7. {joonmyung-1.5.9 → joonmyung-1.5.11}/README.md +0 -0
  8. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/__init__.py +0 -0
  9. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/__init__.py +0 -0
  10. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/dataset.py +0 -0
  11. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/hook.py +0 -0
  12. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/metric.py +0 -0
  13. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/model.py +0 -0
  14. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/utils.py +0 -0
  15. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/app.py +0 -0
  16. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/data.py +0 -0
  17. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/draw.py +0 -0
  18. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/dummy.py +0 -0
  19. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/file.py +0 -0
  20. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/gradcam.py +0 -0
  21. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/log.py +0 -0
  22. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/meta_data/__init__.py +0 -0
  23. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/meta_data/label.py +0 -0
  24. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/meta_data/utils.py +0 -0
  25. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/metric.py +0 -0
  26. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/models/__init__.py +0 -0
  27. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/models/tome.py +0 -0
  28. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/status.py +0 -0
  29. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/utils.py +0 -0
  30. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/SOURCES.txt +0 -0
  31. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/dependency_links.txt +0 -0
  32. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/not-zip-safe +0 -0
  33. {joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/top_level.txt +0 -0
  34. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/SA/MHSA.py +0 -0
  35. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/SA/PVTSA.py +0 -0
  36. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/SA/TMSA.py +0 -0
  37. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/SA/__init__.py +0 -0
  38. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/__init__.py +0 -0
  39. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/deit.py +0 -0
  40. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/evit.py +0 -0
  41. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/modules/PE.py +0 -0
  42. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/modules/__init__.py +0 -0
  43. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/modules/blocks.py +0 -0
  44. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/pvt.py +0 -0
  45. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/pvt_v2.py +0 -0
  46. {joonmyung-1.5.9 → joonmyung-1.5.11}/models/tome.py +0 -0
  47. {joonmyung-1.5.9 → joonmyung-1.5.11}/setup.cfg +0 -0
{joonmyung-1.5.9 → joonmyung-1.5.11}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: joonmyung
-Version: 1.5.9
+Version: 1.5.11
 Summary: JoonMyung's Library
 Home-page: https://github.com/pizard/JoonMyung.git
 Author: JoonMyung Choi
{joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/analysis/analysis.py
@@ -14,17 +14,12 @@ import cv2

 def anaModel(transformer_class):
     class VisionTransformer(transformer_class):
-        def has_parameter(self, parameter_name):
-            return parameter_name in self.__init__.__code__.co_varnames
-
         def forward_features(self, x):
             x = self.patch_embed(x)
-            if self.has_parameter("cls_token"):
+            if hasattr(self, "cls_token"):
                 cls_token = self.cls_token.expand(x.shape[0], -1, -1) # stole cls_tokens impl from Phil Wang, thanks
                 x = torch.cat((cls_token, x), dim=1)

-
-
             if self.analysis[0] == 1: # PATCH
                 x = x # (8, 197, 192)
             elif self.analysis[0] == 2: # POS
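A note on the has_parameter → hasattr change above: the removed helper only checked whether a name appears among the arguments of __init__ (self.__init__.__code__.co_varnames), which says nothing about whether the instance actually ended up with a cls_token attribute, while hasattr inspects the live object. A minimal sketch of the difference, using a hypothetical Toy class rather than anything from this package:

# Hypothetical toy; shows why the instance check is the safer test.
class Toy:
    def __init__(self, cls_token=None):
        if cls_token is not None:
            self.cls_token = cls_token

t = Toy()  # the attribute is never set

print("cls_token" in t.__init__.__code__.co_varnames)  # True: the name is an __init__ argument
print(hasattr(t, "cls_token"))                          # False: the instance has no such attribute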
@@ -39,27 +34,27 @@ def anaModel(transformer_class):

             x = self.blocks(x)
             x = self.norm(x)
-            if self.has_parameter("cls_token") and self.has_parameter("dist_token"):
+            if hasattr(self, "cls_token") and hasattr(self, "cls_token"):
                 return x[:, 0], x[:, 1]
-            elif self.has_parameter("cls_token"):
+            elif hasattr(self, "cls_token"):
                 return self.pre_logits(x[:, 0])
             else:
                 return self.pre_logits(x.mean(dim=1))

-
     return VisionTransformer

 class Analysis:
     def __init__(self, model, analysis = [0], activate = [True, False, False, False], detach=True, key_name=None, num_classes = 1000
-                 , cls_start=0, cls_end=1, patch_start=1, patch_end=None
+                 , cls_start=0, cls_end=1, patch_start=1, patch_end=None, wrapping=False
                  , amp_autocast=suppress, device="cuda"):
         # Section A. Model
         self.num_classes = num_classes
         self.key_name = key_name
+        if wrapping:
+            model_ = anaModel(model.__class__)
+            model.__class__ = model_
+            model.analysis = analysis

-        model_ = anaModel(model.__class__)
-        model.__class__ = model_
-        model.analysis = analysis
         self.model = model
         self.detach = detach

@@ -72,8 +67,7 @@ class Analysis:
                  {"name_i": 'qkv', "name_o": 'decoder', "fn_f": self.qkv_forward, "fn_b": self.qkv_backward},
                  {"name_i": 'head', "name_o": 'decoder', "fn_f": self.head_forward, "fn_b": self.head_backward},
                  {"name_i": 'patch_embed.norm', "name_o": 'decoder', "fn_f": self.input_forward, "fn_b": self.input_backward}]
-        hooks = [h for h, a in zip(hooks, activate) if a]
-
+        self.activate = activate

         self.amp_autocast = amp_autocast
         self.device = device
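The other __init__ change worth noting is the wrapping flag added one hunk up: in 1.5.9 Analysis always reassigned model.__class__ to the anaModel wrapper, while in 1.5.11 that only happens when wrapping=True is passed. A minimal sketch of the class-reassignment pattern the wrapper relies on, with hypothetical Base/wrap names that are not part of the package:

# Hypothetical illustration of swapping an instance's class in place, as anaModel does.
def wrap(base_class):
    class Wrapped(base_class):
        def forward(self):
            return super().forward() + " (analysed)"
    return Wrapped

class Base:
    def forward(self):
        return "base"

m = Base()
m.__class__ = wrap(m.__class__)  # existing attributes survive; only behaviour changes
m.analysis = [0]                 # extra state attached afterwards, as in the diff
print(m.forward())               # "base (analysed)"

Presumably callers that relied on the automatic wrapping in 1.5.9 now need to pass Analysis(model, wrapping=True) to get the modified forward_features.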
@@ -87,15 +81,15 @@ class Analysis:

     def attn_forward(self, module, input, output):
         # input/output : 1 * (8, 3, 197, 197) / (8, 3, 197, 197)
-        self.info["attn"]["f"] = output.detach() if self.detach else output
+        if self.activate[0]: self.info["attn"]["f"] = output.detach() if self.detach else output

     def attn_backward(self, module, grad_input, grad_output):
         # input/output : 1 * (8, 3, 197, 192) / (8, 3, 197, 576)
-        self.info["attn"]["b"] = grad_input[0].detach() if self.detach else grad_input[0]
+        if self.activate[0]: self.info["attn"]["b"] = grad_input[0].detach() if self.detach else grad_input[0]

     def qkv_forward(self, module, input, output):
         # input/output : 1 * (8, 197, 192) / (8, 197, 576)
-        self.info["qkv"]["f"].append(output.detach())
+        if self.activate[1]: self.info["qkv"]["f"].append(output.detach())

     def qkv_backward(self, module, grad_input, grad_output):
         # self.info["qkv"]["b"].append(grad_input[0].detach())
@@ -103,20 +97,22 @@ class Analysis:

     def head_forward(self, module, input, output):
         # input : 1 * (8(B), 192(D)), output : (8(B), 1000(C))
-        B = output.shape[0]
-        pred = targetPred(output, self.targets, topk=5)
-        self.info["head"]["TF"] += (pred[:, 0] == pred[:, 1])
+        if self.activate[2]:
+            B = output.shape[0]
+            pred = targetPred(output, self.targets, topk=5)
+            self.info["head"]["TF"] += (pred[:, 0] == pred[:, 1])

-        acc1, acc5 = accuracy(output, self.targets, topk=(1,5))
-        self.info["head"]["acc1"].update(acc1.item(), n=B)
-        self.info["head"]["acc5"].update(acc5.item(), n=B)
+            acc1, acc5 = accuracy(output, self.targets, topk=(1,5))
+            self.info["head"]["acc1"].update(acc1.item(), n=B)
+            self.info["head"]["acc5"].update(acc5.item(), n=B)

     def head_backward(self, module, grad_input, grad_output):
         pass

     def input_forward(self, module, input, output):
-        norm = F.normalize(output, dim=-1)
-        self.info["input"]["sim"] += (norm @ norm.transpose(-1, -2)).mean(dim=(-1, -2))
+        if self.activate[3]:
+            norm = F.normalize(output, dim=-1)
+            self.info["input"]["sim"] += (norm @ norm.transpose(-1, -2)).mean(dim=(-1, -2))

     def input_backward(self, module, grad_input, grad_output):
         pass
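Taken together with the hooks hunk above, the activate flags now change behaviour rather than registration: 1.5.9 filtered the hook list at construction time (hooks = [h for h, a in zip(hooks, activate) if a]), whereas 1.5.11 stores the list as self.activate and each callback checks its own flag at call time, with index 0 gating the attn hooks, 1 the qkv hook, 2 the head hook, and 3 the patch_embed.norm input hook. A minimal sketch of this call-time gating with a PyTorch forward hook, using a hypothetical Recorder class rather than the package's Analysis:

import torch
import torch.nn as nn

class Recorder:
    # Hypothetical stand-in: the hook stays registered, but only records when its flag is on.
    def __init__(self, activate=(True,)):
        self.activate = list(activate)
        self.info = {"feat": []}

    def feat_forward(self, module, inputs, output):
        if self.activate[0]:
            self.info["feat"].append(output.detach())

layer = nn.Linear(4, 4)
rec = Recorder(activate=(True,))
layer.register_forward_hook(rec.feat_forward)

layer(torch.randn(2, 4))      # recorded
rec.activate[0] = False       # flags can be toggled at runtime without re-registering hooks
layer(torch.randn(2, 4))      # skipped
print(len(rec.info["feat"]))  # 1

One consequence of the new scheme is that every hook is registered even when its flag is False; the callback simply returns without storing anything.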
{joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung/script.py
@@ -97,7 +97,7 @@ class GPU_Worker():
         return

     def message(self, text):
-        url = "https://hooks.slack.com/services/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"
+        url = "https://hooks.slack.com/services/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"
         payload = {"text": text}
         headers = {'Content-type': 'application/json'}

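GPU_Worker.message presumably posts this payload to a Slack incoming webhook (the actual request sits below this hunk's context); the only change here is the webhook URL itself. A minimal sketch of the same request shape with the requests library, using a placeholder URL rather than either of the real webhooks above:

import json
import requests

def send_slack_message(text, url="https://hooks.slack.com/services/XXX/YYY/ZZZ"):
    # Same payload/header shape as in message(): {"text": ...} sent with a JSON content type.
    payload = {"text": text}
    headers = {"Content-type": "application/json"}
    response = requests.post(url, data=json.dumps(payload), headers=headers)
    return response.status_code  # Slack incoming webhooks return 200 with body "ok" on success

# send_slack_message("Experiments Finished")  # hypothetical usage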
@@ -123,7 +123,7 @@ def Process_Worker(processes, gpuWorker, id = "", p = True):
     training_time = datetime.timedelta(seconds=time.mktime(end) - time.mktime(start))
     print(f"Time 1/all : {training_time}/{training_time / len(processes)} ------")
     gpuWorker.message(f"Experiments Finished"
-                      f"{id} : "
+                      f"{id} : {server}"
                       f"Time 1/all : {training_time}/{training_time / len(processes)}"
                       )

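The added {server} is not a parameter of Process_Worker, so it presumably refers to a name defined elsewhere in script.py. It is also worth noting that the message is built from adjacent f-strings, which concatenate with no separator. A small hypothetical illustration (all values made up):

# `id` mirrors the parameter name used in Process_Worker; it shadows the builtin here.
id, server = "exp-01", "gpu-3"
training_time, n = "0:42:00", 4
text = (f"Experiments Finished"
        f"{id} : {server}"
        f"Time 1/all : {training_time}/{n}")
print(text)  # Experiments Finishedexp-01 : gpu-3Time 1/all : 0:42:00/4

So the Slack message arrives as a single run-on line unless spaces or newlines are added between the pieces.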
{joonmyung-1.5.9 → joonmyung-1.5.11}/joonmyung.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: joonmyung
-Version: 1.5.9
+Version: 1.5.11
 Summary: JoonMyung's Library
 Home-page: https://github.com/pizard/JoonMyung.git
 Author: JoonMyung Choi
{joonmyung-1.5.9 → joonmyung-1.5.11}/setup.py
@@ -3,7 +3,7 @@ from setuptools import find_packages

 setuptools.setup(
     name="joonmyung",
-    version="1.5.9",
+    version="1.5.11",
     author="JoonMyung Choi",
     author_email="pizard@korea.ac.kr",
     description="JoonMyung's Library",