mttf 0.31.202309060229__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mttf might be problematic.

@@ -1,101 +0,0 @@
- '''Additional optimizer-related classes.'''
-
- import abc
- import math
-
- from tensorflow.python.framework import constant_op
- from tensorflow.python.framework import ops
- from tensorflow.python.ops import math_ops
- from tensorflow.keras.optimizers.schedules import LearningRateSchedule
-
- class CosineDecay(LearningRateSchedule): # available in TF 2.5
-   """A LearningRateSchedule that uses a cosine decay schedule.
-
-   See [Loshchilov & Hutter, ICLR2016](https://arxiv.org/abs/1608.03983),
-   SGDR: Stochastic Gradient Descent with Warm Restarts.
-
-   When training a model, it is often useful to lower the learning rate as
-   the training progresses. This schedule applies a cosine decay function
-   to an optimizer step, given a provided initial learning rate.
-   It requires a `step` value to compute the decayed learning rate. You can
-   just pass a TensorFlow variable that you increment at each training step.
-
-   The schedule is a 1-arg callable that produces a decayed learning
-   rate when passed the current optimizer step. This can be useful for changing
-   the learning rate value across different invocations of optimizer functions.
-   It is computed as:
-
-   ```python
-   def decayed_learning_rate(step):
-     step = min(step, decay_steps)
-     cosine_decay = 0.5 * (1 + cos(pi * step / decay_steps))
-     decayed = (1 - alpha) * cosine_decay + alpha
-     return initial_learning_rate * decayed
-   ```
-
-   Example usage:
-   ```python
-   decay_steps = 1000
-   lr_decayed_fn = tf.keras.optimizers.schedules.CosineDecay(
-       initial_learning_rate, decay_steps)
-   ```
-
-   You can pass this schedule directly into a `tf.keras.optimizers.Optimizer`
-   as the learning rate. The learning rate schedule is also serializable and
-   deserializable using `tf.keras.optimizers.schedules.serialize` and
-   `tf.keras.optimizers.schedules.deserialize`.
-
-   Returns:
-     A 1-arg callable learning rate schedule that takes the current optimizer
-     step and outputs the decayed learning rate, a scalar `Tensor` of the same
-     type as `initial_learning_rate`.
-   """
-
-   def __init__(
-       self,
-       initial_learning_rate,
-       decay_steps,
-       alpha=0.0,
-       name=None):
-     """Applies cosine decay to the learning rate.
-
-     Args:
-       initial_learning_rate: A scalar `float32` or `float64` Tensor or a
-         Python number. The initial learning rate.
-       decay_steps: A scalar `int32` or `int64` `Tensor` or a Python number.
-         Number of steps to decay over.
-       alpha: A scalar `float32` or `float64` Tensor or a Python number.
-         Minimum learning rate value as a fraction of initial_learning_rate.
-       name: String. Optional name of the operation. Defaults to 'CosineDecay'.
-     """
-     super(CosineDecay, self).__init__()
-
-     self.initial_learning_rate = initial_learning_rate
-     self.decay_steps = decay_steps
-     self.alpha = alpha
-     self.name = name
-
-   def __call__(self, step):
-     with ops.name_scope_v2(self.name or "CosineDecay"):
-       initial_learning_rate = ops.convert_to_tensor_v2_with_dispatch(
-           self.initial_learning_rate, name="initial_learning_rate")
-       dtype = initial_learning_rate.dtype
-       decay_steps = math_ops.cast(self.decay_steps, dtype)
-
-       global_step_recomp = math_ops.cast(step, dtype)
-       global_step_recomp = math_ops.minimum(global_step_recomp, decay_steps)
-       completed_fraction = global_step_recomp / decay_steps
-       cosine_decayed = 0.5 * (1.0 + math_ops.cos(
-           constant_op.constant(math.pi) * completed_fraction))
-
-       decayed = (1 - self.alpha) * cosine_decayed + self.alpha
-       return math_ops.multiply(initial_learning_rate, decayed)
-
-   def get_config(self):
-     return {
-         "initial_learning_rate": self.initial_learning_rate,
-         "decay_steps": self.decay_steps,
-         "alpha": self.alpha,
-         "name": self.name
-     }
-
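
The removed class is a copy of the `CosineDecay` schedule that ships with TensorFlow 2.5+ as `tf.keras.optimizers.schedules.CosineDecay` (as the class comment notes). A minimal usage sketch against the built-in schedule, assuming TensorFlow 2.5 or later and illustrative hyperparameter values:

```python
import tensorflow as tf

# Illustrative values; decay_steps is typically the total number of
# training steps (steps_per_epoch * epochs).
initial_learning_rate = 0.1
decay_steps = 1000

# The learning rate follows a cosine curve from initial_learning_rate down to
# alpha * initial_learning_rate over decay_steps steps, then stays there.
lr_schedule = tf.keras.optimizers.schedules.CosineDecay(
    initial_learning_rate, decay_steps, alpha=0.0)

# The schedule object is passed directly as the optimizer's learning rate;
# the optimizer calls it with the current step on every update.
optimizer = tf.keras.optimizers.SGD(learning_rate=lr_schedule, momentum=0.9)

# It can also be evaluated by hand: halfway through, cos(pi/2) = 0 gives
# 0.5 * initial_learning_rate.
print(float(lr_schedule(500)))  # ~0.05

# Round-trip through the serialization helpers mentioned in the docstring.
config = tf.keras.optimizers.schedules.serialize(lr_schedule)
restored = tf.keras.optimizers.schedules.deserialize(config)
```
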
@@ -1,2 +0,0 @@
- #!/bin/bash
- wml_nexus.py pip3 install --trusted-host localhost --extra-index https://localhost:5443/repository/minhtri-pypi-dev/simple/ $@
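
The removed shell script forwards a `pip3 install` to a private Nexus PyPI index through `wml_nexus.py`, a helper that is not part of this diff. As a rough sketch only (assuming the same host and repository path, and pip's standard `--trusted-host` and `--extra-index-url` options), the equivalent call could be driven directly from Python like this; the helper name below is hypothetical:

```python
import subprocess
import sys

def install_from_private_index(*packages):
    """Hypothetical helper: install packages, also consulting the private
    Nexus PyPI index used by the removed script."""
    cmd = [
        sys.executable, "-m", "pip", "install",
        # Treat the localhost index as trusted even without a verifiable certificate.
        "--trusted-host", "localhost",
        # Search the private repository in addition to PyPI.
        "--extra-index-url",
        "https://localhost:5443/repository/minhtri-pypi-dev/simple/",
        *packages,
    ]
    subprocess.check_call(cmd)

# Example (hypothetical): install_from_private_index("mttf")
```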