mttf 1.1.6__py3-none-any.whl → 1.1.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mttf might be problematic. Click here for more details.

mt/keras/__init__.py CHANGED
@@ -1,32 +1 @@
1
1
  """MT package that represents the working Keras 2 from the system."""
2
-
3
- from packaging.version import Version
4
- import tensorflow as tf
5
-
6
- tf_ver = Version(tf.__version__)
7
- if tf_ver >= Version("2.16"):
8
- try:
9
- import tf_keras
10
- except:
11
- raise ImportError(
12
- f"mt.keras can only work with Keras 2. You have TF version {tf_ver}. Please install tf_keras."
13
- )
14
- from tf_keras import *
15
-
16
- __version__ = tf_keras.__version__
17
- __source__ = "tf_keras"
18
- else:
19
- try:
20
- import keras
21
-
22
- kr_ver = Version(keras.__version__)
23
- except ImportError:
24
- kr_ver = None
25
- if kr_ver is None or kr_ver >= Version("3.0"):
26
- __version__ = tf.__version__
27
- __source__ = "tensorflow.python"
28
- from tensorflow.keras import *
29
- else:
30
- __version__ = keras.__version__
31
- __source__ = "keras"
32
- from keras import *
@@ -0,0 +1,10 @@
1
"""MT package that re-exports the working Keras 2 implementation selected by mt.keras.base."""

from .base import keras_version, keras_source

# Re-export every public symbol of whichever Keras 2 implementation base.py
# detected on this system.
if keras_source == "tf_keras":
    from tf_keras import *
elif keras_source == "keras":
    from keras import *
elif keras_source in ("tensorflow.keras", "tensorflow.python"):
    # fix: base.py historically reports the TF-bundled Keras as
    # "tensorflow.python"; the importable module is tensorflow.keras either
    # way, so accept both spellings instead of raising ImportError.
    from tensorflow.keras import *
else:
    raise ImportError(f"Unknown value '{keras_source}' for variable 'keras_source'.")
mt/keras/base/base.py ADDED
@@ -0,0 +1,28 @@
1
+ """Determines the working Keras 2 from the system to be used by mt.keras."""
2
+
3
+ from packaging.version import Version
4
+ import tensorflow as tf
5
+
6
+ tf_ver = Version(tf.__version__)
7
+ if tf_ver >= Version("2.16"):
8
+ try:
9
+ import tf_keras
10
+ except:
11
+ raise ImportError(
12
+ f"mt.keras can only work with Keras 2. You have TF version {tf_ver}. Please install tf_keras."
13
+ )
14
+ keras_version = tf_keras.__version__
15
+ keras_source = "tf_keras"
16
+ else:
17
+ try:
18
+ import keras
19
+
20
+ kr_ver = Version(keras.__version__)
21
+ except ImportError:
22
+ kr_ver = None
23
+ if kr_ver is None or kr_ver >= Version("3.0"):
24
+ keras_version = tf.__version__
25
+ keras_source = "tensorflow.python"
26
+ else:
27
+ keras_version = keras.__version__
28
+ keras_source = "keras"
@@ -0,0 +1,39 @@
1
from ..base import layers as _layers

# Re-export every public symbol of the backing Keras layers module so this
# package is a drop-in replacement for it.
for _x, _y in _layers.__dict__.items():
    # fix: was `if x.startswith("_")` — `x` is undefined and raised NameError
    # on import; the loop variable is `_x`.
    if _x.startswith("_"):
        continue
    globals()[_x] = _y
del _x, _y  # do not leak the loop variables as public names
# fix: was `globals.__doc__ = _layers.__doc__`, which tries to set the
# read-only __doc__ of the builtin globals() function (AttributeError);
# assign the module docstring instead.
__doc__ = _layers.__doc__

from .identical import *
from .floor import *
from .var_regularizer import *
from .simple_mha import *
from .image_sizing import *
from .counter import Counter
from .normed_conv2d import NormedConv2D
from .utils import *


# Public API of this package, on top of the re-exported Keras layer names.
__api__ = [
    "Identical",
    "Floor",
    "VarianceRegularizer",
    "SimpleMHA2D",
    "MHAPool2D",
    "DUCLayer",
    "Downsize2D",
    "Upsize2D",
    "Downsize2D_V2",
    "Upsize2D_V2",
    "Downsize2D_V3",
    "Downsize2D_V4",
    "DownsizeX2D",
    "UpsizeX2D",
    "DownsizeY2D",
    "UpsizeY2D",
    "Counter",
    "conv2d",
    "dense2d",
]
@@ -1,12 +1,12 @@
1
1
  import tensorflow as tf
2
- import tensorflow.keras as tk
2
+ from ..base import layers, initializers
3
3
 
4
4
 
5
- class Counter(tk.layers.Layer):
5
+ class Counter(layers.Layer):
6
6
  """A layer that counts from 0 during training and does nothing during inference."""
7
7
 
8
8
  def build(self, input_shape):
9
- initializer = tk.initializers.Constant(value=0.0)
9
+ initializer = initializers.Constant(value=0.0)
10
10
  self.counter = self.add_weight(
11
11
  name="counter", shape=(1,), initializer=initializer
12
12
  )
@@ -19,9 +19,9 @@ class Counter(tk.layers.Layer):
19
19
  y = tf.stop_gradient(y) * 0.0
20
20
  return self.counter + y
21
21
 
22
- call.__doc__ = tk.layers.Layer.call.__doc__
22
+ call.__doc__ = layers.Layer.call.__doc__
23
23
 
24
24
  def compute_output_shape(self, input_shape):
25
25
  return (1,)
26
26
 
27
- compute_output_shape.__doc__ = tk.layers.Layer.compute_output_shape.__doc__
27
+ compute_output_shape.__doc__ = layers.Layer.compute_output_shape.__doc__
@@ -1,4 +1,5 @@
1
1
  import tensorflow as tf
2
+ from ..base import layers
2
3
 
3
4
 
4
5
  @tf.custom_gradient
@@ -9,15 +10,15 @@ def floor(x):
9
10
  return tf.math.floor(x), grad
10
11
 
11
12
 
12
- class Floor(tf.keras.layers.Layer):
13
+ class Floor(layers.Layer):
13
14
  """TensorFlow floor but gradient is identity."""
14
15
 
15
16
  def call(self, x):
16
17
  return floor(x)
17
18
 
18
- call.__doc__ = tf.keras.layers.Layer.call.__doc__
19
+ call.__doc__ = layers.Layer.call.__doc__
19
20
 
20
21
  def compute_output_shape(self, input_shape):
21
22
  return input_shape
22
23
 
23
- compute_output_shape.__doc__ = tf.keras.layers.Layer.compute_output_shape.__doc__
24
+ compute_output_shape.__doc__ = layers.Layer.compute_output_shape.__doc__
@@ -1,15 +1,15 @@
1
- import tensorflow.keras.layers as _kl
1
+ from ..base import layers
2
2
 
3
3
 
4
- class Identical(_kl.Layer):
4
+ class Identical(layers.Layer):
5
5
  """An identical layer, mainly for renaming purposes."""
6
6
 
7
7
  def call(self, x):
8
8
  return x
9
9
 
10
- call.__doc__ = _kl.Layer.call.__doc__
10
+ call.__doc__ = layers.Layer.call.__doc__
11
11
 
12
12
  def compute_output_shape(self, input_shape):
13
13
  return input_shape
14
14
 
15
- compute_output_shape.__doc__ = _kl.Layer.compute_output_shape.__doc__
15
+ compute_output_shape.__doc__ = layers.Layer.compute_output_shape.__doc__
@@ -1,9 +1,10 @@
1
1
  """Module involves upsizing and downsizing images in each axis individually using convolutions of residuals."""
2
2
 
3
- import tensorflow as tf
4
-
5
3
  from mt import tp, np
6
4
 
5
+ import tensorflow as tf
6
+ from ..base import layers, initializers, regularizers, constraints
7
+
7
8
 
8
9
  def mirror_all_weights(l_weights: list) -> list:
9
10
  """TBC"""
@@ -27,7 +28,7 @@ def mirror_all_weights(l_weights: list) -> list:
27
28
  return l_newWeights
28
29
 
29
30
 
30
- class DUCLayer(tf.keras.layers.Layer):
31
+ class DUCLayer(layers.Layer):
31
32
  """Base layer for all DUC layer implementations.
32
33
 
33
34
  Parameters
@@ -65,33 +66,27 @@ class DUCLayer(tf.keras.layers.Layer):
65
66
  super(DUCLayer, self).__init__(**kwargs)
66
67
 
67
68
  self._kernel_size = kernel_size
68
- self._kernel_initializer = tf.keras.initializers.get(kernel_initializer)
69
- self._bias_initializer = tf.keras.initializers.get(bias_initializer)
70
- self._kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer)
71
- self._bias_regularizer = tf.keras.regularizers.get(bias_regularizer)
72
- self._kernel_constraint = tf.keras.constraints.get(kernel_constraint)
73
- self._bias_constraint = tf.keras.constraints.get(bias_constraint)
69
+ self._kernel_initializer = initializers.get(kernel_initializer)
70
+ self._bias_initializer = initializers.get(bias_initializer)
71
+ self._kernel_regularizer = regularizers.get(kernel_regularizer)
72
+ self._bias_regularizer = regularizers.get(bias_regularizer)
73
+ self._kernel_constraint = constraints.get(kernel_constraint)
74
+ self._bias_constraint = constraints.get(bias_constraint)
74
75
 
75
76
  def get_config(self):
76
77
  config = {
77
78
  "kernel_size": self._kernel_size,
78
- "kernel_initializer": tf.keras.initializers.serialize(
79
- self._kernel_initializer
80
- ),
81
- "bias_initializer": tf.keras.initializers.serialize(self._bias_initializer),
82
- "kernel_regularizer": tf.keras.regularizers.serialize(
83
- self._kernel_regularizer
84
- ),
85
- "bias_regularizer": tf.keras.regularizers.serialize(self._bias_regularizer),
86
- "kernel_constraint": tf.keras.constraints.serialize(
87
- self._kernel_constraint
88
- ),
89
- "bias_constraint": tf.keras.constraints.serialize(self._bias_constraint),
79
+ "kernel_initializer": initializers.serialize(self._kernel_initializer),
80
+ "bias_initializer": initializers.serialize(self._bias_initializer),
81
+ "kernel_regularizer": regularizers.serialize(self._kernel_regularizer),
82
+ "bias_regularizer": regularizers.serialize(self._bias_regularizer),
83
+ "kernel_constraint": constraints.serialize(self._kernel_constraint),
84
+ "bias_constraint": constraints.serialize(self._bias_constraint),
90
85
  }
91
86
  base_config = super(DUCLayer, self).get_config()
92
87
  return dict(list(base_config.items()) + list(config.items()))
93
88
 
94
- get_config.__doc__ = tf.keras.layers.Layer.get_config.__doc__
89
+ get_config.__doc__ = layers.Layer.get_config.__doc__
95
90
 
96
91
  def get_mirrored_weights(self):
97
92
  return mirror_all_weights(self.get_weights())
@@ -166,8 +161,8 @@ class Upsize2D(DUCLayer):
166
161
  self._expansion_factor = expansion_factor
167
162
 
168
163
  if self._expansion_factor > 1:
169
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
170
- self.expansion_layer = tf.keras.layers.Conv2D(
164
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
165
+ self.expansion_layer = layers.Conv2D(
171
166
  self._input_dim * 2 * expansion_factor,
172
167
  self._kernel_size,
173
168
  padding="same",
@@ -180,8 +175,8 @@ class Upsize2D(DUCLayer):
180
175
  bias_constraint=self._bias_constraint,
181
176
  name="expand",
182
177
  )
183
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
184
- self.projection_layer = tf.keras.layers.Conv2D(
178
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
179
+ self.projection_layer = layers.Conv2D(
185
180
  self._input_dim * 2,
186
181
  self._kernel_size,
187
182
  padding="same",
@@ -332,8 +327,8 @@ class Downsize2D(DUCLayer):
332
327
  self._expansion_factor = expansion_factor
333
328
 
334
329
  if self._expansion_factor > 1:
335
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
336
- self.expansion_layer = tf.keras.layers.Conv2D(
330
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
331
+ self.expansion_layer = layers.Conv2D(
337
332
  self._input_dim * 4 * self._expansion_factor,
338
333
  self._kernel_size,
339
334
  padding="same",
@@ -346,8 +341,8 @@ class Downsize2D(DUCLayer):
346
341
  bias_constraint=self._bias_constraint,
347
342
  name="expand",
348
343
  )
349
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
350
- self.projection_layer = tf.keras.layers.Conv2D(
344
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
345
+ self.projection_layer = layers.Conv2D(
351
346
  self._input_dim,
352
347
  self._kernel_size,
353
348
  padding="same",
@@ -523,8 +518,8 @@ class Downsize2D_V2(DUCLayer):
523
518
  self._projection_uses_bias = projection_uses_bias
524
519
 
525
520
  if self._expansion_factor > 1:
526
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
527
- self.expansion_layer = tf.keras.layers.Conv2D(
521
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
522
+ self.expansion_layer = layers.Conv2D(
528
523
  (self._img_dim + self._res_dim) * 4 * self._expansion_factor,
529
524
  self._kernel_size,
530
525
  padding="same",
@@ -537,8 +532,8 @@ class Downsize2D_V2(DUCLayer):
537
532
  bias_constraint=self._bias_constraint,
538
533
  name="expand",
539
534
  )
540
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
541
- self.projection_layer = tf.keras.layers.Conv2D(
535
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
536
+ self.projection_layer = layers.Conv2D(
542
537
  self._img_dim + self._res_dim * 2,
543
538
  1,
544
539
  padding="same",
@@ -710,8 +705,8 @@ class Upsize2D_V2(DUCLayer):
710
705
  self._expansion_factor = expansion_factor
711
706
 
712
707
  if self._expansion_factor > 1:
713
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
714
- self.expansion_layer = tf.keras.layers.Conv2D(
708
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
709
+ self.expansion_layer = layers.Conv2D(
715
710
  (self._img_dim + self._res_dim) * 2 * expansion_factor,
716
711
  self._kernel_size,
717
712
  padding="same",
@@ -724,8 +719,8 @@ class Upsize2D_V2(DUCLayer):
724
719
  bias_constraint=self._bias_constraint,
725
720
  name="expand",
726
721
  )
727
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
728
- self.projection_layer = tf.keras.layers.Conv2D(
722
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
723
+ self.projection_layer = layers.Conv2D(
729
724
  (self._img_dim + self._res_dim) * 2,
730
725
  self._kernel_size if self._expansion_factor <= 1 else 1,
731
726
  padding="same",
@@ -875,10 +870,8 @@ class Downsize2D_V3(DUCLayer):
875
870
 
876
871
  if res_dim > 0:
877
872
  if res_dim > img_dim:
878
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(
879
- name="prenorm1"
880
- )
881
- self.expand1_layer = tf.keras.layers.Conv2D(
873
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
874
+ self.expand1_layer = layers.Conv2D(
882
875
  img_dim * 2 + res_dim * 4,
883
876
  self._kernel_size,
884
877
  padding="same",
@@ -892,8 +885,8 @@ class Downsize2D_V3(DUCLayer):
892
885
  name="expand1",
893
886
  )
894
887
  RR = (img_dim + res_dim * 3 + 1) // 2
895
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
896
- self.project1_layer = tf.keras.layers.Conv2D(
888
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
889
+ self.project1_layer = layers.Conv2D(
897
890
  RR,
898
891
  1,
899
892
  padding="same",
@@ -906,8 +899,8 @@ class Downsize2D_V3(DUCLayer):
906
899
  bias_constraint=self._bias_constraint,
907
900
  name="project1",
908
901
  )
909
- self.prenorm3_layer = tf.keras.layers.LayerNormalization(name="prenorm3")
910
- self.expand2_layer = tf.keras.layers.Conv2D(
902
+ self.prenorm3_layer = layers.LayerNormalization(name="prenorm3")
903
+ self.expand2_layer = layers.Conv2D(
911
904
  img_dim * 2 + RR * 4,
912
905
  self._kernel_size,
913
906
  padding="same",
@@ -920,8 +913,8 @@ class Downsize2D_V3(DUCLayer):
920
913
  bias_constraint=self._bias_constraint,
921
914
  name="expand2",
922
915
  )
923
- self.prenorm4_layer = tf.keras.layers.LayerNormalization(name="prenorm4")
924
- self.project2_layer = tf.keras.layers.Conv2D(
916
+ self.prenorm4_layer = layers.LayerNormalization(name="prenorm4")
917
+ self.project2_layer = layers.Conv2D(
925
918
  img_dim + res_dim * 2,
926
919
  1,
927
920
  padding="same",
@@ -1174,10 +1167,8 @@ class DownsizeX2D(DUCLayerV5):
1174
1167
 
1175
1168
  if res_dim > 0:
1176
1169
  if res_dim > img_dim:
1177
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(
1178
- name="prenorm1"
1179
- )
1180
- self.expand1_layer = tf.keras.layers.Conv2D(
1170
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
1171
+ self.expand1_layer = layers.Conv2D(
1181
1172
  (self.I + self.R) * 4,
1182
1173
  self._kernel_size,
1183
1174
  padding="same",
@@ -1190,8 +1181,8 @@ class DownsizeX2D(DUCLayerV5):
1190
1181
  bias_constraint=self._bias_constraint,
1191
1182
  name="expand1",
1192
1183
  )
1193
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
1194
- self.project1_layer = tf.keras.layers.Conv2D(
1184
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
1185
+ self.project1_layer = layers.Conv2D(
1195
1186
  self.RX,
1196
1187
  1,
1197
1188
  padding="same",
@@ -1298,8 +1289,8 @@ class UpsizeX2D(DUCLayerV5):
1298
1289
  )
1299
1290
 
1300
1291
  if res_dim > 0:
1301
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
1302
- self.expand1_layer = tf.keras.layers.Conv2D(
1292
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
1293
+ self.expand1_layer = layers.Conv2D(
1303
1294
  (self.I + self.R) * 4,
1304
1295
  self._kernel_size,
1305
1296
  padding="same",
@@ -1312,8 +1303,8 @@ class UpsizeX2D(DUCLayerV5):
1312
1303
  bias_constraint=self._bias_constraint,
1313
1304
  name="expand1",
1314
1305
  )
1315
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
1316
- self.project1_layer = tf.keras.layers.Conv2D(
1306
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
1307
+ self.project1_layer = layers.Conv2D(
1317
1308
  self.R,
1318
1309
  1,
1319
1310
  padding="same",
@@ -1326,7 +1317,7 @@ class UpsizeX2D(DUCLayerV5):
1326
1317
  bias_constraint=self._bias_constraint,
1327
1318
  name="project1",
1328
1319
  )
1329
- self.project2_layer = tf.keras.layers.Conv2D(
1320
+ self.project2_layer = layers.Conv2D(
1330
1321
  self.I + self.R,
1331
1322
  1,
1332
1323
  padding="same",
@@ -1429,8 +1420,8 @@ class DownsizeY2D(DUCLayerV5):
1429
1420
  )
1430
1421
 
1431
1422
  if self.R > 0:
1432
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
1433
- self.expand1_layer = tf.keras.layers.Conv2D(
1423
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
1424
+ self.expand1_layer = layers.Conv2D(
1434
1425
  (self.I + self.RX) * 4,
1435
1426
  self._kernel_size,
1436
1427
  padding="same",
@@ -1443,8 +1434,8 @@ class DownsizeY2D(DUCLayerV5):
1443
1434
  bias_constraint=self._bias_constraint,
1444
1435
  name="expand1",
1445
1436
  )
1446
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
1447
- self.project1_layer = tf.keras.layers.Conv2D(
1437
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
1438
+ self.project1_layer = layers.Conv2D(
1448
1439
  self.RY,
1449
1440
  1,
1450
1441
  padding="same",
@@ -1547,8 +1538,8 @@ class UpsizeY2D(DUCLayerV5):
1547
1538
  **kwargs
1548
1539
  )
1549
1540
 
1550
- self.prenorm1_layer = tf.keras.layers.LayerNormalization(name="prenorm1")
1551
- self.expand1_layer = tf.keras.layers.Conv2D(
1541
+ self.prenorm1_layer = layers.LayerNormalization(name="prenorm1")
1542
+ self.expand1_layer = layers.Conv2D(
1552
1543
  (self.I + self.RX) * 4,
1553
1544
  self._kernel_size,
1554
1545
  padding="same",
@@ -1561,8 +1552,8 @@ class UpsizeY2D(DUCLayerV5):
1561
1552
  bias_constraint=self._bias_constraint,
1562
1553
  name="expand1",
1563
1554
  )
1564
- self.prenorm2_layer = tf.keras.layers.LayerNormalization(name="prenorm2")
1565
- self.project1_layer = tf.keras.layers.Conv2D(
1555
+ self.prenorm2_layer = layers.LayerNormalization(name="prenorm2")
1556
+ self.project1_layer = layers.Conv2D(
1566
1557
  self.RX,
1567
1558
  1,
1568
1559
  padding="same",
@@ -1575,7 +1566,7 @@ class UpsizeY2D(DUCLayerV5):
1575
1566
  bias_constraint=self._bias_constraint,
1576
1567
  name="project1",
1577
1568
  )
1578
- self.project2_layer = tf.keras.layers.Conv2D(
1569
+ self.project2_layer = layers.Conv2D(
1579
1570
  self.I + self.RX,
1580
1571
  1,
1581
1572
  padding="same",
@@ -1,10 +1,8 @@
1
- import tensorflow.keras as tk
2
-
3
-
1
+ from ..base import layers, initializers, regularizers, constraints
4
2
  from .counter import Counter
5
3
 
6
4
 
7
- class NormedConv2D(tk.layers.Layer):
5
+ class NormedConv2D(layers.Layer):
8
6
  """A block of Conv2D without activation, followed by LayerNormalization, then activation.
9
7
 
10
8
  This layer represents the following block:
@@ -171,7 +169,7 @@ class NormedConv2D(tk.layers.Layer):
171
169
  for key in self.keys:
172
170
  setattr(self, key, locals()[key])
173
171
 
174
- self.conv2d = tk.layers.Conv2D(
172
+ self.conv2d = layers.Conv2D(
175
173
  filters,
176
174
  kernel_size,
177
175
  strides=strides,
@@ -188,7 +186,7 @@ class NormedConv2D(tk.layers.Layer):
188
186
 
189
187
  self.counter = Counter()
190
188
 
191
- self.norm = tk.layers.LayerNormalization(
189
+ self.norm = layers.LayerNormalization(
192
190
  axis=-1,
193
191
  epsilon=epsilon,
194
192
  scale=True,
@@ -202,7 +200,7 @@ class NormedConv2D(tk.layers.Layer):
202
200
  )
203
201
 
204
202
  if activation is not None:
205
- self.acti = tk.layers.Activation(activation)
203
+ self.acti = layers.Activation(activation)
206
204
 
207
205
  def call(self, x, training: bool = False):
208
206
  count = self.counter(x, training=training)
@@ -215,7 +213,7 @@ class NormedConv2D(tk.layers.Layer):
215
213
  w = self.acti(z, training=training)
216
214
  return w
217
215
 
218
- call.__doc__ = tk.layers.Layer.call.__doc__
216
+ call.__doc__ = layers.Layer.call.__doc__
219
217
 
220
218
  def get_config(self):
221
219
  config = {key: getattr(self, key) for key in self.keys}
@@ -224,17 +222,17 @@ class NormedConv2D(tk.layers.Layer):
224
222
  key = prefix + "_initializer"
225
223
  value = config[key]
226
224
  if not isinstance(value, str):
227
- value = tk.initializers.serialize(value)
225
+ value = initializers.serialize(value)
228
226
  config[key] = value
229
227
  key = prefix + "_regularizer"
230
228
  value = config[key]
231
229
  if not isinstance(value, str):
232
- value = tk.regularizers.serialize(value)
230
+ value = regularizers.serialize(value)
233
231
  config[key] = value
234
232
  key = prefix + "_constraint"
235
233
  value = config[key]
236
234
  if not isinstance(value, str):
237
- value = tk.constraints.serialize(value)
235
+ value = constraints.serialize(value)
238
236
  config[key] = value
239
237
  config = {key: value for key, value in config.items() if value is not None}
240
238
  base_config = super(NormedConv2D, self).get_config()
@@ -21,9 +21,11 @@ from tensorflow.python.util.tf_export import keras_export
21
21
 
22
22
  from mt import tp, tfc
23
23
 
24
+ from ..base import layers, initializers, regularizers, constraints
25
+
24
26
 
25
27
  @keras_export("keras.layers.SimpleMHA2D")
26
- class SimpleMHA2D(tf.keras.layers.Layer):
28
+ class SimpleMHA2D(layers.Layer):
27
29
  """SimpleMHA2D layer.
28
30
 
29
31
  This is a simplified version of the Keras-based MultiHeadAttention layer.
@@ -70,7 +72,7 @@ class SimpleMHA2D(tf.keras.layers.Layer):
70
72
  --------
71
73
 
72
74
  >>> layer = SimpleMHA2D(num_heads=3, key_dim=40, value_dim=80)
73
- >>> input_tensor = tf.keras.Input(shape=[8, 8, 160])
75
+ >>> input_tensor = layers.Input(shape=[8, 8, 160])
74
76
  >>> output_tensor = layer(input_tensor)
75
77
  >>> print(output_tensor.shape)
76
78
  (None, 3, 80)
@@ -97,13 +99,13 @@ class SimpleMHA2D(tf.keras.layers.Layer):
97
99
  self._key_dim = key_dim
98
100
  self._value_dim = value_dim if value_dim else key_dim
99
101
  self._use_bias = use_bias
100
- self._activation = tf.keras.activations.get(activation)
101
- self._kernel_initializer = tf.keras.initializers.get(kernel_initializer)
102
- self._bias_initializer = tf.keras.initializers.get(bias_initializer)
103
- self._kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer)
104
- self._bias_regularizer = tf.keras.regularizers.get(bias_regularizer)
105
- self._kernel_constraint = tf.keras.constraints.get(kernel_constraint)
106
- self._bias_constraint = tf.keras.constraints.get(bias_constraint)
102
+ self._activation = activations.get(activation)
103
+ self._kernel_initializer = initializers.get(kernel_initializer)
104
+ self._bias_initializer = initializers.get(bias_initializer)
105
+ self._kernel_regularizer = regularizers.get(kernel_regularizer)
106
+ self._bias_regularizer = regularizers.get(bias_regularizer)
107
+ self._kernel_constraint = constraints.get(kernel_constraint)
108
+ self._bias_constraint = constraints.get(bias_constraint)
107
109
  self._dropout = dropout
108
110
 
109
111
  self.tensor_query = self.add_weight(
@@ -113,7 +115,7 @@ class SimpleMHA2D(tf.keras.layers.Layer):
113
115
  trainable=True,
114
116
  )
115
117
 
116
- self.layer_key_proj = tf.keras.layers.Conv2D(
118
+ self.layer_key_proj = layers.Conv2D(
117
119
  self._num_heads * self._key_dim, # filters
118
120
  1, # kernel_size
119
121
  use_bias=self._use_bias,
@@ -125,7 +127,7 @@ class SimpleMHA2D(tf.keras.layers.Layer):
125
127
  bias_constraint=self._bias_constraint,
126
128
  )
127
129
 
128
- self.layer_value_proj = tf.keras.layers.Conv2D(
130
+ self.layer_value_proj = layers.Conv2D(
129
131
  self._num_heads * self._value_dim, # filters
130
132
  1, # kernel_size
131
133
  use_bias=self._use_bias,
@@ -138,9 +140,9 @@ class SimpleMHA2D(tf.keras.layers.Layer):
138
140
  bias_constraint=self._bias_constraint,
139
141
  )
140
142
 
141
- self.layer_softmax = tf.keras.layers.Softmax(axis=1)
143
+ self.layer_softmax = layers.Softmax(axis=1)
142
144
  if self._dropout > 0:
143
- self.layer_dropout = tf.keras.layers.Dropout(rate=self._dropout)
145
+ self.layer_dropout = layers.Dropout(rate=self._dropout)
144
146
 
145
147
  def call(self, key_value, training=None):
146
148
  """The call function.
@@ -203,19 +205,13 @@ class SimpleMHA2D(tf.keras.layers.Layer):
203
205
  "key_dim": self._key_dim,
204
206
  "value_dim": self._value_dim,
205
207
  "use_bias": self._use_bias,
206
- "activation": tf.keras.activations.serialize(self._activation),
207
- "kernel_initializer": tf.keras.initializers.serialize(
208
- self._kernel_initializer
209
- ),
210
- "bias_initializer": tf.keras.initializers.serialize(self._bias_initializer),
211
- "kernel_regularizer": tf.keras.regularizers.serialize(
212
- self._kernel_regularizer
213
- ),
214
- "bias_regularizer": tf.keras.regularizers.serialize(self._bias_regularizer),
215
- "kernel_constraint": tf.keras.constraints.serialize(
216
- self._kernel_constraint
217
- ),
218
- "bias_constraint": tf.keras.constraints.serialize(self._bias_constraint),
208
+ "activation": activations.serialize(self._activation),
209
+ "kernel_initializer": initializers.serialize(self._kernel_initializer),
210
+ "bias_initializer": initializers.serialize(self._bias_initializer),
211
+ "kernel_regularizer": regularizers.serialize(self._kernel_regularizer),
212
+ "bias_regularizer": regularizers.serialize(self._bias_regularizer),
213
+ "kernel_constraint": constraints.serialize(self._kernel_constraint),
214
+ "bias_constraint": constraints.serialize(self._bias_constraint),
219
215
  "dropout": self._dropout,
220
216
  }
221
217
  base_config = super(SimpleMHA2D, self).get_config()
@@ -223,7 +219,7 @@ class SimpleMHA2D(tf.keras.layers.Layer):
223
219
 
224
220
 
225
221
  @keras_export("keras.layers.MHAPool2D")
226
- class MHAPool2D(tf.keras.layers.Layer):
222
+ class MHAPool2D(layers.Layer):
227
223
  """Pooling in 2D using Keras-based self-attention.
228
224
 
229
225
  The layer takes as input a high-dim image tensor of shape [B, H, W, D] where B is the
@@ -279,7 +275,7 @@ class MHAPool2D(tf.keras.layers.Layer):
279
275
  --------
280
276
 
281
277
  >>> layer = MHAPool2D(num_heads=3, key_dim=40, value_dim=80)
282
- >>> input_tensor = tf.keras.Input(shape=[8, 8, 160])
278
+ >>> input_tensor = layers.Input(shape=[8, 8, 160])
283
279
  >>> output_tensor = layer(input_tensor)
284
280
  >>> print(output_tensor.shape)
285
281
  (None, 4, 4, 240)
@@ -310,25 +306,25 @@ class MHAPool2D(tf.keras.layers.Layer):
310
306
  self._pooling = pooling
311
307
  self._pool_size = pool_size
312
308
  self._use_bias = use_bias
313
- self._activation = tf.keras.activations.get(activation)
314
- self._kernel_initializer = tf.keras.initializers.get(kernel_initializer)
315
- self._bias_initializer = tf.keras.initializers.get(bias_initializer)
316
- self._kernel_regularizer = tf.keras.regularizers.get(kernel_regularizer)
317
- self._bias_regularizer = tf.keras.regularizers.get(bias_regularizer)
318
- self._kernel_constraint = tf.keras.constraints.get(kernel_constraint)
319
- self._bias_constraint = tf.keras.constraints.get(bias_constraint)
309
+ self._activation = activations.get(activation)
310
+ self._kernel_initializer = initializers.get(kernel_initializer)
311
+ self._bias_initializer = initializers.get(bias_initializer)
312
+ self._kernel_regularizer = regularizers.get(kernel_regularizer)
313
+ self._bias_regularizer = regularizers.get(bias_regularizer)
314
+ self._kernel_constraint = constraints.get(kernel_constraint)
315
+ self._bias_constraint = constraints.get(bias_constraint)
320
316
  self._dropout = dropout
321
317
 
322
318
  if self._pooling == "max":
323
- self.layer_pool = tf.keras.layers.MaxPool2D()
319
+ self.layer_pool = layers.MaxPool2D()
324
320
  elif self._pooling == "avg":
325
- self.layer_pool = tf.keras.layers.AveragePooling2D()
321
+ self.layer_pool = layers.AveragePooling2D()
326
322
  else:
327
323
  raise tfc.ModelSyntaxError(
328
324
  "Invalid pooling string: '{}'.".format(self._pooling)
329
325
  )
330
326
 
331
- self.layer_query_proj = tf.keras.layers.Conv2D(
327
+ self.layer_query_proj = layers.Conv2D(
332
328
  self._num_heads * self._key_dim, # filters
333
329
  1, # kernel_size
334
330
  use_bias=self._use_bias,
@@ -342,7 +338,7 @@ class MHAPool2D(tf.keras.layers.Layer):
342
338
  name="query_proj",
343
339
  )
344
340
 
345
- self.layer_key_proj = tf.keras.layers.Conv2D(
341
+ self.layer_key_proj = layers.Conv2D(
346
342
  self._num_heads * self._key_dim, # filters
347
343
  1, # kernel_size
348
344
  use_bias=self._use_bias,
@@ -356,7 +352,7 @@ class MHAPool2D(tf.keras.layers.Layer):
356
352
  name="key_proj",
357
353
  )
358
354
 
359
- self.layer_value_proj = tf.keras.layers.Conv2D(
355
+ self.layer_value_proj = layers.Conv2D(
360
356
  self._num_heads * self._value_dim, # filters
361
357
  1, # kernel_size
362
358
  use_bias=self._use_bias,
@@ -370,9 +366,9 @@ class MHAPool2D(tf.keras.layers.Layer):
370
366
  name="value_proj",
371
367
  )
372
368
 
373
- self.layer_softmax = tf.keras.layers.Softmax(axis=3)
369
+ self.layer_softmax = layers.Softmax(axis=3)
374
370
  if self._dropout > 0:
375
- self.layer_dropout = tf.keras.layers.Dropout(rate=self._dropout)
371
+ self.layer_dropout = layers.Dropout(rate=self._dropout)
376
372
 
377
373
  def call(self, blob, training=None, return_attention_scores: bool = False):
378
374
  """The call function.
@@ -464,19 +460,13 @@ class MHAPool2D(tf.keras.layers.Layer):
464
460
  "pooling": self._pooling,
465
461
  "pool_size": self._pool_size,
466
462
  "use_bias": self._use_bias,
467
- "activation": tf.keras.activations.serialize(self._activation),
468
- "kernel_initializer": tf.keras.initializers.serialize(
469
- self._kernel_initializer
470
- ),
471
- "bias_initializer": tf.keras.initializers.serialize(self._bias_initializer),
472
- "kernel_regularizer": tf.keras.regularizers.serialize(
473
- self._kernel_regularizer
474
- ),
475
- "bias_regularizer": tf.keras.regularizers.serialize(self._bias_regularizer),
476
- "kernel_constraint": tf.keras.constraints.serialize(
477
- self._kernel_constraint
478
- ),
479
- "bias_constraint": tf.keras.constraints.serialize(self._bias_constraint),
463
+ "activation": activations.serialize(self._activation),
464
+ "kernel_initializer": initializers.serialize(self._kernel_initializer),
465
+ "bias_initializer": initializers.serialize(self._bias_initializer),
466
+ "kernel_regularizer": regularizers.serialize(self._kernel_regularizer),
467
+ "bias_regularizer": regularizers.serialize(self._bias_regularizer),
468
+ "kernel_constraint": constraints.serialize(self._kernel_constraint),
469
+ "bias_constraint": constraints.serialize(self._bias_constraint),
480
470
  "dropout": self._dropout,
481
471
  }
482
472
  base_config = super(MHAPool2D, self).get_config()
@@ -2,7 +2,7 @@
2
2
 
3
3
  from mt import tp
4
4
 
5
- from ..utils import NameScope
5
+ from mt.tf.utils import NameScope
6
6
 
7
7
 
8
8
  def conv2d(name_scope: NameScope, x, filters, kernel_size, **kwargs):
@@ -30,11 +30,11 @@ def conv2d(name_scope: NameScope, x, filters, kernel_size, **kwargs):
30
30
  TF tensor as output
31
31
  """
32
32
 
33
- import tensorflow.keras.layers as kl
33
+ from ..base import layers
34
34
 
35
35
  next(name_scope)
36
- x = kl.LayerNormalization(name=name_scope("prenorm"))(x)
37
- x = kl.Conv2D(filters, kernel_size, name=name_scope("conv"), **kwargs)(x)
36
+ x = layers.LayerNormalization(name=name_scope("prenorm"))(x)
37
+ x = layers.Conv2D(filters, kernel_size, name=name_scope("conv"), **kwargs)(x)
38
38
 
39
39
  return x
40
40
 
@@ -71,15 +71,15 @@ def dense2d(
71
71
  TF tensor as output
72
72
  """
73
73
 
74
- import tensorflow.keras.layers as kl
74
+ from ..base import layers
75
75
 
76
76
  next(name_scope)
77
- x = kl.LayerNormalization(name=name_scope("expand_prenorm"))(x)
78
- x = kl.Conv2D(
77
+ x = layers.LayerNormalization(name=name_scope("expand_prenorm"))(x)
78
+ x = layers.Conv2D(
79
79
  x.shape[3] * 2, 1, name=name_scope("expand"), activation="relu", **kwargs
80
80
  )(x)
81
- x = kl.LayerNormalization(name=name_scope("project_prenorm"))(x)
82
- x = kl.Conv2D(
81
+ x = layers.LayerNormalization(name=name_scope("project_prenorm"))(x)
82
+ x = layers.Conv2D(
83
83
  filters,
84
84
  kernel_size,
85
85
  name=name_scope("project"),
@@ -2,8 +2,10 @@ import tensorflow as tf
2
2
 
3
3
  from mt import tp
4
4
 
5
+ from ..base import layers
5
6
 
6
- class VarianceRegularizer(tf.keras.layers.Layer):
7
+
8
+ class VarianceRegularizer(layers.Layer):
7
9
  """A regularizer on the variance of the input tensor.
8
10
 
9
11
  Negative rate for making the variance larger. Positive rate for making the variance smaller.
@@ -23,7 +25,7 @@ class VarianceRegularizer(tf.keras.layers.Layer):
23
25
  self.add_loss(self.rate * sum_var)
24
26
  return x
25
27
 
26
- call.__doc__ = tf.keras.layers.Layer.call.__doc__
28
+ call.__doc__ = layers.Layer.call.__doc__
27
29
 
28
30
  def get_config(self):
29
31
  config = {
@@ -33,4 +35,4 @@ class VarianceRegularizer(tf.keras.layers.Layer):
33
35
  base_config = super(VarianceRegularizer, self).get_config()
34
36
  return dict(list(base_config.items()) + list(config.items()))
35
37
 
36
- get_config.__doc__ = tf.keras.layers.Layer.get_config.__doc__
38
+ get_config.__doc__ = layers.Layer.get_config.__doc__
@@ -1,31 +1,5 @@
1
- from .identical import *
2
- from .floor import *
3
- from .var_regularizer import *
4
- from .simple_mha import *
5
- from .image_sizing import *
6
- from .counter import Counter
7
- from .normed_conv2d import NormedConv2D
8
- from .utils import *
1
+ from mt.logg import logger
9
2
 
3
+ logger.warn_module_move("mt.tf.keras_layers", "mt.keras.layers")
10
4
 
11
- __api__ = [
12
- "Identical",
13
- "Floor",
14
- "VarianceRegularizer",
15
- "SimpleMHA2D",
16
- "MHAPool2D",
17
- "DUCLayer",
18
- "Downsize2D",
19
- "Upsize2D",
20
- "Downsize2D_V2",
21
- "Upsize2D_V2",
22
- "Downsize2D_V3",
23
- "Downsize2D_V4",
24
- "DownsizeX2D",
25
- "UpsizeX2D",
26
- "DownsizeY2D",
27
- "UpsizeY2D",
28
- "Counter",
29
- "conv2d",
30
- "dense2d",
31
- ]
5
+ from mt.keras.layers import *
mt/tf/version.py CHANGED
@@ -1,5 +1,5 @@
1
1
  MAJOR_VERSION = 1
2
2
  MINOR_VERSION = 1
3
- PATCH_VERSION = 6
3
+ PATCH_VERSION = 9
4
4
  version = '{}.{}.{}'.format(MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION)
5
5
  __all__ = ['MAJOR_VERSION', 'MINOR_VERSION', 'PATCH_VERSION', 'version']
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mttf
3
- Version: 1.1.6
3
+ Version: 1.1.9
4
4
  Summary: A package to detect and monkey-patch TensorFlow and Keras, for Minh-Tri Pham
5
5
  Home-page: https://github.com/inteplus/mttf
6
6
  Author: ['Minh-Tri Pham']
@@ -0,0 +1,36 @@
1
+ mt/keras/__init__.py,sha256=oM2xsSZTuGSUvFJSpntk8r7bzGcTytNvn-Apv_SBFh4,70
2
+ mt/keras/base/__init__.py,sha256=vIvuf3gjbpXC-rGpXLfIApLRUV6w1GUzVK8YJOgWLyk,327
3
+ mt/keras/base/base.py,sha256=_B2sSUMlHOtGSAqQD1p5YD0raEDL4W0Bh3uKD6BXOJM,807
4
+ mt/keras/layers/__init__.py,sha256=mGO1iwfYK9ml1gcvpBZC-fpB_Kc4ayeKKc_qWX3kZGI,757
5
+ mt/keras/layers/counter.py,sha256=J3__IXbaa7zp72a5P8FFi0bfftTHwa1xzzCwxCIU2gc,856
6
+ mt/keras/layers/floor.py,sha256=4mSpmTrhM7VqTK85POkjC3OhaTZUNUF9knO7gTbSGtc,512
7
+ mt/keras/layers/identical.py,sha256=AIqC36PxU9sXyF9rZuQ-5ObjnIjBiSIMHIb5MwqVdmY,361
8
+ mt/keras/layers/image_sizing.py,sha256=LeWwyFwQSYrDq_2fjOVi1YiRn7GcwMzrLUCLR8zcaKI,58530
9
+ mt/keras/layers/normed_conv2d.py,sha256=dFqeuNS0WyzrGIP3wiaKl_iSFmTgTPg-w3nCvn2X4d0,10680
10
+ mt/keras/layers/simple_mha.py,sha256=ecGL5Sy8mbpj6F8hAAN2h-7Dt0TiAOEM7o6SyVrVTp4,19225
11
+ mt/keras/layers/utils.py,sha256=lk9y0Sl4_w69JtFXKrKlWIgWBJx5lz9WrQi2LszvaZE,2834
12
+ mt/keras/layers/var_regularizer.py,sha256=EZ8ueXrObfu-oo4qixM0UkULe-C03kdNQppQ_NmtYSA,1108
13
+ mt/tf/__init__.py,sha256=M8xiJNdrAUJZgiZTOQOdfkehjO-CYzGpoxh5HVGBkms,338
14
+ mt/tf/init.py,sha256=BUpw33uyA_DmeJjrN2fX4MIs8MynKxkwgc2oTGTqssU,1294
15
+ mt/tf/mttf_version.py,sha256=ha53i-H9pE-crufFttUECgXHwPvam07zMKzApUts1Gs,206
16
+ mt/tf/utils.py,sha256=Copl5VM0PpuFUchK-AcBuGO6QitDwHcEs4FruZb2GAI,2460
17
+ mt/tf/version.py,sha256=OJ5C8dPfTjHQppzvBtr3tex8vKiblUMZ8i5wkIS8_U0,206
18
+ mt/tf/keras_applications/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
19
+ mt/tf/keras_applications/mobilenet_v3_split.py,sha256=1oPB3EX3k3c7iju9Ksuw9xyv32-mOKPs3uy2Mk5tLd8,19716
20
+ mt/tf/keras_applications/mobilevit.py,sha256=VsKB_U-f9jwUEjpd0eq-YXL4rDsuAbKQ0yIzkbMfLzw,9949
21
+ mt/tf/keras_layers/__init__.py,sha256=NsuFD-kSuy6cVV3Kl7ab95tw4g7x4Igv3cF-Ky3VuCo,124
22
+ mt/tfc/__init__.py,sha256=XFXAbmEi5uTAuZj797cBqw9ZBnVy6ptxP0TTzVauMNk,8446
23
+ mt/tfg/__init__.py,sha256=6Ly2QImAyQTsg_ZszuAuK_L2n56v89Cix9yYmMVk0CM,304
24
+ mt/tfp/__init__.py,sha256=AQkGCkmDRwswEt3qoOSpxe-fZekx78sHHBs2ZVz33gc,383
25
+ mt/tfp/real_nvp.py,sha256=U9EmkXGqFcvtS2yeh5_RgbKlVKKlGFGklAb7Voyazz4,4440
26
+ mttf-1.1.9.data/scripts/dmt_pipi.sh,sha256=NNsj4P332unHMqU4mAFjU9PQvxQ8TK5XQ42LC29IZY8,510
27
+ mttf-1.1.9.data/scripts/dmt_twineu.sh,sha256=KZhcYwuCW0c36tWcOgCe7uxJmS08rz-J6YNY76Exy4M,193
28
+ mttf-1.1.9.data/scripts/pipi.sh,sha256=kdo96bdaKq2QIa52Z4XFSiGPcbDm09SAU9cju6I2Lxo,289
29
+ mttf-1.1.9.data/scripts/wml_nexus.py,sha256=kW0ju8_kdXc4jOjhdzKiMsFuO1MNpHmu87skrhu9SEg,1492
30
+ mttf-1.1.9.data/scripts/wml_pipi.sh,sha256=CuidIcbuxyXSBNQqYRhCcSC8QbBaSGnQX0KAIFaIvKA,499
31
+ mttf-1.1.9.data/scripts/wml_twineu.sh,sha256=av1JLN765oOWC5LPkv2eSWIVof26y60601tMGkuYdb8,180
32
+ mttf-1.1.9.dist-info/licenses/LICENSE,sha256=e_JtcszdGZ2ZGfjcymTGrcxFj_9XPicZOVtnsrPvruk,1070
33
+ mttf-1.1.9.dist-info/METADATA,sha256=YyP4zCEwqSJvYI-6QfagU_NVwuVjHhugu0Jpviq22t0,567
34
+ mttf-1.1.9.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
35
+ mttf-1.1.9.dist-info/top_level.txt,sha256=WcqGFu9cV7iMZg09iam8eNxUvGpLSKKF2Iubf6SJVOo,3
36
+ mttf-1.1.9.dist-info/RECORD,,
@@ -1,33 +0,0 @@
1
- mt/keras/__init__.py,sha256=Qv3x6wnkG-ytMPzgQ8xPNe4NxxiD4z9RG23rvvLyRvg,883
2
- mt/tf/__init__.py,sha256=M8xiJNdrAUJZgiZTOQOdfkehjO-CYzGpoxh5HVGBkms,338
3
- mt/tf/init.py,sha256=BUpw33uyA_DmeJjrN2fX4MIs8MynKxkwgc2oTGTqssU,1294
4
- mt/tf/mttf_version.py,sha256=ha53i-H9pE-crufFttUECgXHwPvam07zMKzApUts1Gs,206
5
- mt/tf/utils.py,sha256=Copl5VM0PpuFUchK-AcBuGO6QitDwHcEs4FruZb2GAI,2460
6
- mt/tf/version.py,sha256=WILNiQZrHE_E8TaKdRSqSywIA2x-8Z8oGUEwvLmUqMI,206
7
- mt/tf/keras_applications/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
- mt/tf/keras_applications/mobilenet_v3_split.py,sha256=1oPB3EX3k3c7iju9Ksuw9xyv32-mOKPs3uy2Mk5tLd8,19716
9
- mt/tf/keras_applications/mobilevit.py,sha256=VsKB_U-f9jwUEjpd0eq-YXL4rDsuAbKQ0yIzkbMfLzw,9949
10
- mt/tf/keras_layers/__init__.py,sha256=fSfhKmDz4mIHUYXgRrditWY_aAkgWGM_KjmAilOauXg,578
11
- mt/tf/keras_layers/counter.py,sha256=KsM25lRvxQZZUz0GLbeUNoPLwVoLF1MK0tQPn9-dVWQ,858
12
- mt/tf/keras_layers/floor.py,sha256=cSP-2B3o50ffTFMHZy-C-r7jWA6OFLMyaxTz9kAlsiM,513
13
- mt/tf/keras_layers/identical.py,sha256=ZBiK5P0QisWBowVKUODH-LfnJT_YI7MLvhEobIlEGx0,364
14
- mt/tf/keras_layers/image_sizing.py,sha256=OTfCHTyMSJO3LLOEeCbvJiuyV6FeSvMb5mtvCSYvk9Q,59133
15
- mt/tf/keras_layers/normed_conv2d.py,sha256=UVV5x6jpjGeOqx6DlSbvdV1O7nO_64-wKT3rXkNQxGA,10669
16
- mt/tf/keras_layers/simple_mha.py,sha256=Xv4lyFxDWRKYLmsPUUNJeZQYi8rLya7HlJsaXGbmV0E,19710
17
- mt/tf/keras_layers/utils.py,sha256=PzlrGrtgPMzTaNuY3p3QwnVSqsMnZ6NIbRSEqXEamGA,2828
18
- mt/tf/keras_layers/var_regularizer.py,sha256=Afe5Mpd8TynsXVD06RtEfFatuSKmruKcNze4_C2E2po,1108
19
- mt/tfc/__init__.py,sha256=XFXAbmEi5uTAuZj797cBqw9ZBnVy6ptxP0TTzVauMNk,8446
20
- mt/tfg/__init__.py,sha256=6Ly2QImAyQTsg_ZszuAuK_L2n56v89Cix9yYmMVk0CM,304
21
- mt/tfp/__init__.py,sha256=AQkGCkmDRwswEt3qoOSpxe-fZekx78sHHBs2ZVz33gc,383
22
- mt/tfp/real_nvp.py,sha256=U9EmkXGqFcvtS2yeh5_RgbKlVKKlGFGklAb7Voyazz4,4440
23
- mttf-1.1.6.data/scripts/dmt_pipi.sh,sha256=NNsj4P332unHMqU4mAFjU9PQvxQ8TK5XQ42LC29IZY8,510
24
- mttf-1.1.6.data/scripts/dmt_twineu.sh,sha256=KZhcYwuCW0c36tWcOgCe7uxJmS08rz-J6YNY76Exy4M,193
25
- mttf-1.1.6.data/scripts/pipi.sh,sha256=kdo96bdaKq2QIa52Z4XFSiGPcbDm09SAU9cju6I2Lxo,289
26
- mttf-1.1.6.data/scripts/wml_nexus.py,sha256=kW0ju8_kdXc4jOjhdzKiMsFuO1MNpHmu87skrhu9SEg,1492
27
- mttf-1.1.6.data/scripts/wml_pipi.sh,sha256=CuidIcbuxyXSBNQqYRhCcSC8QbBaSGnQX0KAIFaIvKA,499
28
- mttf-1.1.6.data/scripts/wml_twineu.sh,sha256=av1JLN765oOWC5LPkv2eSWIVof26y60601tMGkuYdb8,180
29
- mttf-1.1.6.dist-info/licenses/LICENSE,sha256=e_JtcszdGZ2ZGfjcymTGrcxFj_9XPicZOVtnsrPvruk,1070
30
- mttf-1.1.6.dist-info/METADATA,sha256=O8Ly1k7_qsC-jBLBywSh3jwYFRTCuaRfmfd3ovnAUos,567
31
- mttf-1.1.6.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
32
- mttf-1.1.6.dist-info/top_level.txt,sha256=WcqGFu9cV7iMZg09iam8eNxUvGpLSKKF2Iubf6SJVOo,3
33
- mttf-1.1.6.dist-info/RECORD,,
File without changes
File without changes
File without changes
File without changes