tf-models-nightly 2.17.0.dev20240416__py2.py3-none-any.whl → 2.17.0.dev20240418__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
@@ -164,7 +164,7 @@ class MultiTaskBaseTrainer(orbit.StandardTrainer):
           task_metrics=self.training_metrics)
       for key, loss in losses.items():
         self.training_losses[key].update_state(loss)
+      self.global_step.assign_add(1)
 
     self.strategy.run(
         step_fn, args=(tf.nest.map_structure(next, iterator_map),))
-    self.global_step.assign_add(1)
@@ -81,6 +81,15 @@ class MultiTaskInterleavingTrainer(base_trainer.MultiTaskBaseTrainer):
   def task_step_counter(self, name):
     return self._task_step_counters[name]
 
+  def _task_train_step(self, name):
+    """Runs one training step and updates counters."""
+    def _step_fn(inputs):
+      self._task_train_step_map[name](inputs)
+      self.global_step.assign_add(1)
+      self.task_step_counter(name).assign_add(1)
+
+    return _step_fn
+
   def train_step(self, iterator_map):
     # Sample one task to train according to a multinomial distribution
     rn = tf.random.stateless_uniform(shape=[], seed=(0, self.global_step))
@@ -96,9 +105,7 @@ class MultiTaskInterleavingTrainer(base_trainer.MultiTaskBaseTrainer):
       end = cumulative_sample_distribution[idx + 1]
       if rn >= begin and rn < end:
         self._strategy.run(
-            self._task_train_step_map[name], args=(next(iterator_map[name]),))
-        self.global_step.assign_add(1)
-        self.task_step_counter(name).assign_add(1)
+            self._task_train_step(name), args=(next(iterator_map[name]),))
 
   def train_loop_end(self):
     """Record loss and metric values per task."""
@@ -22,6 +22,7 @@ import tensorflow as tf, tf_keras
 
 from official.recommendation.uplift import types
 from official.recommendation.uplift.metrics import loss_metric
+from official.recommendation.uplift.metrics import treatment_sliced_metric
 
 
 @tf_keras.utils.register_keras_serializable(package="Uplift")
@@ -121,3 +122,157 @@ class LogLoss(loss_metric.LossMetric):
   @classmethod
   def from_config(cls, config: dict[str, Any]) -> LogLoss:
     return cls(**config)
+
+
+def _safe_x_minus_xlogx(x: tf.Tensor) -> tf.Tensor:
+  """Computes x - x * log(x) with 0 as its continuity point when x equals 0."""
+  values = x * (1.0 - tf.math.log(x))
+  return tf.where(tf.equal(x, 0.0), tf.zeros_like(x), values)
+
+
+class LogLossMeanBaseline(tf_keras.metrics.Metric):
+  """Computes the (weighted) poisson log loss for a mean predictor."""
+
+  def __init__(
+      self,
+      compute_full_loss: bool = False,
+      slice_by_treatment: bool = True,
+      name: str = "poisson_log_loss_mean_baseline",
+      dtype: tf.DType = tf.float32,
+  ):
+    """Initializes the instance.
+
+    Args:
+      compute_full_loss: Specifies whether to compute the full poisson log loss
+        for the mean predictor or not. Defaults to `False`.
+      slice_by_treatment: Specifies whether the loss should be sliced by the
+        treatment indicator tensor. If `True`, the metric's result will return
+        the loss values sliced by the treatment group. Note that this can only
+        be set to `True` when `y_pred` is of type `TwoTowerTrainingOutputs`.
+      name: Optional name for the instance.
+      dtype: Optional data type for the instance.
+    """
+    super().__init__(name=name, dtype=dtype)
+
+    if compute_full_loss:
+      raise NotImplementedError("Full loss computation is not yet supported.")
+
+    self._compute_full_loss = compute_full_loss
+    self._slice_by_treatment = slice_by_treatment
+
+    if slice_by_treatment:
+      self._mean_label = treatment_sliced_metric.TreatmentSlicedMetric(
+          metric=tf_keras.metrics.Mean(name=name, dtype=dtype)
+      )
+    else:
+      self._mean_label = tf_keras.metrics.Mean(name=name, dtype=dtype)
+
+  def update_state(
+      self,
+      y_true: tf.Tensor,
+      y_pred: types.TwoTowerTrainingOutputs | tf.Tensor | None = None,
+      sample_weight: tf.Tensor | None = None,
+  ):
+    is_treatment = {}
+    if self._slice_by_treatment:
+      if not isinstance(y_pred, types.TwoTowerTrainingOutputs):
+        raise ValueError(
+            "`slice_by_treatment` must be set to `False` when `y_pred` is not"
+            " of type `TwoTowerTrainingOutputs`."
+        )
+      is_treatment["is_treatment"] = y_pred.is_treatment
+
+    self._mean_label.update_state(
+        y_true, sample_weight=sample_weight, **is_treatment
+    )
+
+  def result(self) -> tf.Tensor | dict[str, tf.Tensor]:
+    return tf.nest.map_structure(_safe_x_minus_xlogx, self._mean_label.result())
+
+  def get_config(self) -> dict[str, Any]:
+    config = super().get_config()
+    config["compute_full_loss"] = self._compute_full_loss
+    config["slice_by_treatment"] = self._slice_by_treatment
+    return config
+
+  @classmethod
+  def from_config(cls, config: dict[str, Any]) -> LogLossMeanBaseline:
+    return cls(**config)
+
+
+class LogLossMinimum(tf_keras.metrics.Metric):
+  """Computes the minimum achievable (weighted) poisson log loss.
+
+  Given labels `y` and the model's predictions `x`, the minimum loss is obtained
+  when `x` equals `y`. In this case the loss is computed as:
+  `loss = y - y * log(y) + [y * log(y) - y + 0.5 * log(2 * pi * y)]`
+
+  Note that `[y * log(y) - y + 0.5 * log(2 * pi * y)]` is only computed if
+  `compute_full_loss` is set to `True`.
+  """
+
+  def __init__(
+      self,
+      compute_full_loss: bool = False,
+      slice_by_treatment: bool = True,
+      name: str = "poisson_log_loss_minimum",
+      dtype: tf.DType = tf.float32,
+  ):
+    """Initializes the instance.
+
+    Args:
+      compute_full_loss: Specifies whether to compute the full minimum log loss
+        or not. Defaults to `False`.
+      slice_by_treatment: Specifies whether the loss should be sliced by the
+        treatment indicator tensor. If `True`, the metric's result will return
+        the loss values sliced by the treatment group. Note that this can only
+        be set to `True` when `y_pred` is of type `TwoTowerTrainingOutputs`.
+      name: Optional name for the instance.
+      dtype: Optional data type for the instance.
+    """
+    super().__init__(name=name, dtype=dtype)
+
+    if compute_full_loss:
+      raise NotImplementedError("Full loss computation is not yet supported.")
+
+    self._compute_full_loss = compute_full_loss
+    self._slice_by_treatment = slice_by_treatment
+
+    if slice_by_treatment:
+      self._loss = treatment_sliced_metric.TreatmentSlicedMetric(
+          metric=tf_keras.metrics.Mean(name=name, dtype=dtype)
+      )
+    else:
+      self._loss = tf_keras.metrics.Mean(name=name, dtype=dtype)
+
+  def update_state(
+      self,
+      y_true: tf.Tensor,
+      y_pred: types.TwoTowerTrainingOutputs | tf.Tensor | None = None,
+      sample_weight: tf.Tensor | None = None,
+  ):
+    is_treatment = {}
+    if self._slice_by_treatment:
+      if not isinstance(y_pred, types.TwoTowerTrainingOutputs):
+        raise ValueError(
+            "`slice_by_treatment` must be set to `False` when `y_pred` is not"
+            " of type `TwoTowerTrainingOutputs`."
+        )
+      is_treatment["is_treatment"] = y_pred.is_treatment
+
+    self._loss.update_state(
+        _safe_x_minus_xlogx(y_true), sample_weight=sample_weight, **is_treatment
+    )
+
+  def result(self) -> tf.Tensor | dict[str, tf.Tensor]:
+    return self._loss.result()
+
+  def get_config(self) -> dict[str, Any]:
+    config = super().get_config()
+    config["compute_full_loss"] = self._compute_full_loss
+    config["slice_by_treatment"] = self._slice_by_treatment
+    return config
+
+  @classmethod
+  def from_config(cls, config: dict[str, Any]) -> LogLossMinimum:
+    return cls(**config)
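
The new metrics keep the existing interface: `update_state(y_true, y_pred, sample_weight)` plus a `result()` that is a scalar, or a dict keyed by treatment slice when `slice_by_treatment=True`. A rough usage sketch, assuming the tf-models-nightly wheel from this diff is installed; the label tensor is a made-up example:

import tensorflow as tf

from official.recommendation.uplift.metrics import poisson_metrics

# Without treatment slicing, y_pred is not needed for these baselines.
baseline = poisson_metrics.LogLossMeanBaseline(slice_by_treatment=False)
minimum = poisson_metrics.LogLossMinimum(slice_by_treatment=False)

y_true = tf.constant([[0.0], [1.0], [2.0]])
baseline.update_state(y_true)
minimum.update_state(y_true)

# Mean-baseline loss applies x - x*log(x) to the mean label;
# minimum loss averages x - x*log(x) over the labels themselves.
print(float(baseline.result()), float(minimum.result()))
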
@@ -23,7 +23,8 @@ from official.recommendation.uplift.metrics import poisson_metrics
 
 
 def _get_two_tower_outputs(
-    true_logits: tf.Tensor, is_treatment: tf.Tensor
+    is_treatment: tf.Tensor,
+    true_logits: tf.Tensor | None = None,
 ) -> types.TwoTowerTrainingOutputs:
   # Only the true_logits and is_treatment tensors are needed for testing.
   return types.TwoTowerTrainingOutputs(
@@ -33,7 +34,9 @@ def _get_two_tower_outputs(
       uplift=tf.ones_like(is_treatment),
       control_logits=tf.ones_like(is_treatment),
       treatment_logits=tf.ones_like(is_treatment),
-      true_logits=true_logits,
+      true_logits=(
+          true_logits if true_logits is not None else tf.ones_like(is_treatment)
+      ),
       true_predictions=tf.ones_like(is_treatment),
       is_treatment=is_treatment,
   )
@@ -177,5 +180,155 @@ class LogLossTest(keras_test_case.KerasTestCase, parameterized.TestCase):
     )
 
 
+class LogLossMeanBaselineTest(
+    keras_test_case.KerasTestCase, parameterized.TestCase
+):
+
+  @parameterized.named_parameters(
+      {
+          "testcase_name": "label_zero",
+          "expected_loss": 0.0,
+          "y_true": tf.constant([0], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "small_positive_label",
+          "expected_loss": 0.0,
+          "y_true": tf.constant([1e-10], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "label_one",
+          "expected_loss": 1.0,
+          "y_true": tf.constant([1], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "weighted_loss",
+          "expected_loss": 1.0,
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "sample_weight": tf.constant([[0], [1]], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "two_tower_outputs",
+          "expected_loss": 0.5 - 0.5 * tf.math.log(0.5),
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "y_pred": _get_two_tower_outputs(
+              is_treatment=tf.constant([[0], [1]], dtype=tf.float32),
+          ),
+      },
+      {
+          "testcase_name": "two_tower_outputs_sliced_loss",
+          "expected_loss": {
+              "loss": 0.5 - 0.5 * tf.math.log(0.5),
+              "loss/control": 0.0,
+              "loss/treatment": 1.0,
+          },
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "y_pred": _get_two_tower_outputs(
+              is_treatment=tf.constant([[0], [1]], dtype=tf.float32),
+          ),
+          "slice_by_treatment": True,
+      },
+  )
+  def test_metric_computes_correct_loss(
+      self,
+      expected_loss: tf.Tensor,
+      y_true: tf.Tensor,
+      y_pred: types.TwoTowerTrainingOutputs | tf.Tensor | None = None,
+      sample_weight: tf.Tensor | None = None,
+      slice_by_treatment: bool = False,
+  ):
+    metric = poisson_metrics.LogLossMeanBaseline(
+        slice_by_treatment=slice_by_treatment, name="loss"
+    )
+    metric.update_state(y_true, y_pred, sample_weight=sample_weight)
+    self.assertAllClose(expected_loss, metric.result())
+
+  def test_negative_label_returns_nan_loss(self):
+    metric = poisson_metrics.LogLossMeanBaseline(slice_by_treatment=False)
+    metric.update_state(tf.constant([-1.0]))
+    self.assertTrue(tf.math.is_nan(metric.result()).numpy().item())
+
+  def test_metric_is_configurable(self):
+    metric = poisson_metrics.LogLossMeanBaseline(slice_by_treatment=False)
+    self.assertLayerConfigurable(
+        layer=metric,
+        y_true=tf.constant([[0], [0], [2], [7]], dtype=tf.float32),
+        serializable=True,
+    )
+
+
+class LogLossMinimumTest(keras_test_case.KerasTestCase, parameterized.TestCase):
+
+  @parameterized.named_parameters(
+      {
+          "testcase_name": "label_zero",
+          "expected_loss": 0.0,
+          "y_true": tf.constant([0], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "small_positive_label",
+          "expected_loss": 0.0,
+          "y_true": tf.constant([1e-10], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "label_one",
+          "expected_loss": 1.0,
+          "y_true": tf.constant([1], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "weighted_loss",
+          "expected_loss": 1.0,
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "sample_weight": tf.constant([[0], [1]], dtype=tf.float32),
+      },
+      {
+          "testcase_name": "two_tower_outputs",
+          "expected_loss": 0.5,
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "y_pred": _get_two_tower_outputs(
+              is_treatment=tf.constant([[0], [1]], dtype=tf.float32),
+          ),
+      },
+      {
+          "testcase_name": "two_tower_outputs_sliced_loss",
+          "expected_loss": {
+              "loss": 0.5,
+              "loss/control": 0.0,
+              "loss/treatment": 1.0,
+          },
+          "y_true": tf.constant([[0], [1]], dtype=tf.float32),
+          "y_pred": _get_two_tower_outputs(
+              is_treatment=tf.constant([[0], [1]], dtype=tf.float32),
+          ),
+          "slice_by_treatment": True,
+      },
+  )
+  def test_metric_computes_correct_loss(
+      self,
+      expected_loss: tf.Tensor,
+      y_true: tf.Tensor,
+      y_pred: types.TwoTowerTrainingOutputs | tf.Tensor | None = None,
+      sample_weight: tf.Tensor | None = None,
+      slice_by_treatment: bool = False,
+  ):
+    metric = poisson_metrics.LogLossMinimum(
+        slice_by_treatment=slice_by_treatment, name="loss"
+    )
+    metric.update_state(y_true, y_pred, sample_weight=sample_weight)
+    self.assertAllClose(expected_loss, metric.result())
+
+  def test_negative_label_returns_nan_loss(self):
+    metric = poisson_metrics.LogLossMinimum(slice_by_treatment=False)
+    metric.update_state(tf.constant([-1.0]))
+    self.assertTrue(tf.math.is_nan(metric.result()).numpy().item())
+
+  def test_metric_is_configurable(self):
+    metric = poisson_metrics.LogLossMinimum(slice_by_treatment=False)
+    self.assertLayerConfigurable(
+        layer=metric,
+        y_true=tf.constant([[0], [0], [2], [7]], dtype=tf.float32),
+        serializable=True,
+    )
+
+
 if __name__ == "__main__":
   tf.test.main()
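
A quick arithmetic check of the expected values in the test cases above: for labels [0, 1] the mean is 0.5, so the mean-baseline loss is 0.5 - 0.5 * log(0.5) ≈ 0.847, while the minimum loss averages x - x*log(x) over the labels, giving (0 + 1) / 2 = 0.5. A standalone sketch reproducing that arithmetic (plain TensorFlow, no test harness; the helper mirrors `_safe_x_minus_xlogx` from the diff):

import tensorflow as tf

def safe_x_minus_xlogx(x):
  # x - x*log(x), with value 0 at x == 0 (its continuity point).
  values = x * (1.0 - tf.math.log(x))
  return tf.where(tf.equal(x, 0.0), tf.zeros_like(x), values)

y = tf.constant([0.0, 1.0])
mean_baseline_loss = safe_x_minus_xlogx(tf.reduce_mean(y))  # 0.5 - 0.5*log(0.5)
minimum_loss = tf.reduce_mean(safe_x_minus_xlogx(y))        # (0 + 1) / 2 = 0.5
print(float(mean_baseline_loss), float(minimum_loss))       # ~0.847, 0.5
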
@@ -625,6 +625,14 @@ MNV3SmallReducedFilters = {
 }
 
 
+"""
+Architecture: https://arxiv.org/abs/2404.10518
+
+"MobileNetV4 - Universal Models for the Mobile Ecosystem"
+Danfeng Qin, Chas Leichner, Manolis Delakis, Marco Fornoni, Shixin Luo, Fan
+Yang, Weijun Wang, Colby Banbury, Chengxi Ye, Berkin Akin, Vaibhav Aggarwal,
+Tenghui Zhu, Daniele Moro, Andrew Howard
+"""
 MNV4ConvSmall_BLOCK_SPECS = {
     'spec_name': 'MobileNetV4ConvSmall',
     'block_spec_schema': [
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: tf-models-nightly
-Version: 2.17.0.dev20240416
+Version: 2.17.0.dev20240418
 Summary: TensorFlow Official Models
 Home-page: https://github.com/tensorflow/models
 Author: Google Inc.
@@ -213,12 +213,12 @@ official/modeling/hyperparams/params_dict.py,sha256=63fftQdUlycgJErxcyIj7655zL57
 official/modeling/hyperparams/params_dict_test.py,sha256=WPX-VU7L3JVjS42b7BWe77QxP1kdwch05Ib7LvNOTYs,14673
 official/modeling/multitask/__init__.py,sha256=7oiypy0N82PDw9aSdcJBLVoGTd_oRSUOdvuJhMv4leQ,609
 official/modeling/multitask/base_model.py,sha256=QI8qb8ipj75IUj6bKNjcAFHPjeqmNjqHr7nUbPd6a-o,1946
-official/modeling/multitask/base_trainer.py,sha256=niGf1XLtsYyvJE7NFYXphTmIPwKlJbpS34PGK3VgQFo,5856
+official/modeling/multitask/base_trainer.py,sha256=83cLDajiyS2lJPMhllTdIsKXqiVTFLDaGZaherTPCa8,5858
 official/modeling/multitask/base_trainer_test.py,sha256=qJ7z4kid2XAX6hOIvUHa7dwqxouemMekS9ZXhPjWW9w,3663
 official/modeling/multitask/configs.py,sha256=ZO2waQrMn9CAgyFpsmeQvplCF5VeXz7tCPmIuy5jvlc,3164
 official/modeling/multitask/evaluator.py,sha256=spDm2X8EX62qsxI2ehVjrkIKoo-omQQOYcAVKZNgxHc,6078
 official/modeling/multitask/evaluator_test.py,sha256=vU-q-gM7GqiMqE5zbBnOT8mPFhQmHjniMyNnwganhso,4643
-official/modeling/multitask/interleaving_trainer.py,sha256=ZZHKsqbJKLqvwtgy-PUv_S_8bDG0MhJDNwWICY_IF6Q,4458
+official/modeling/multitask/interleaving_trainer.py,sha256=f111ZhknyS34hpP0FfdWjX3_iiLViHfBd0VSuC715s0,4635
 official/modeling/multitask/interleaving_trainer_test.py,sha256=MeQQxpcinPTQuTrAcITjwHa2bAj-XCBCqYsrbxPBus8,4305
 official/modeling/multitask/multitask.py,sha256=DV-ysfhPiIZgsrzZNylsPBxKNBf_xzPxJYjF4buWVgE,5948
 official/modeling/multitask/task_sampler.py,sha256=SGVVdjMb5oG4vnCczpfdgBtbsdsXiyBLl9si_0V6nko,4897
@@ -915,8 +915,8 @@ official/recommendation/uplift/metrics/label_variance_test.py,sha256=k0mdEU1WU53
 official/recommendation/uplift/metrics/loss_metric.py,sha256=gYZdnTsuL_2q1FZuPip-DaWxt_Q-02YYaePyMBVNx7w,7344
 official/recommendation/uplift/metrics/loss_metric_test.py,sha256=48rQG8bKFdy0xBFjoOLXKRUlYpCEyAzSmPOFoF7FX94,16021
 official/recommendation/uplift/metrics/metric_configs.py,sha256=Z-r79orE4EycQ5TJ7xdI5LhjOHT3wzChYyDxcxGqLXk,1670
-official/recommendation/uplift/metrics/poisson_metrics.py,sha256=LJnovpST0H9kFGu-ziDstWOVlAYARLo9oPLDTjzrdu4,4623
-official/recommendation/uplift/metrics/poisson_metrics_test.py,sha256=Kd8CuQeEBlxRklA-7mGKHcUD0CyskE1S3cJqk6mEvv4,6756
+official/recommendation/uplift/metrics/poisson_metrics.py,sha256=1zzwots4WkpxoYmOICt_CZxuAeXWyT9dktdDgT2IAu4,10197
+official/recommendation/uplift/metrics/poisson_metrics_test.py,sha256=o0efkAz1OusU2C85qLl9ZZJLfXflV7acfvWp1igT-1U,12016
 official/recommendation/uplift/metrics/sliced_metric.py,sha256=uhvzudOWtMNKZ0avwGhX-37UELR9Cq9b4C0g8erBkXw,8688
 official/recommendation/uplift/metrics/sliced_metric_test.py,sha256=bhVGyI1tOkFkVOtruJo3p6XopDFyG1JW5qdZm9-RqeU,12248
 official/recommendation/uplift/metrics/treatment_fraction.py,sha256=WHrKfsN42xU7S-pK99xEVpVtd3zLD7UidLT1K8vgIn4,2757
@@ -1049,7 +1049,7 @@ official/vision/modeling/backbones/factory.py,sha256=coJKJpPMhgM9gAc2Q7I5_CuzAaH
 official/vision/modeling/backbones/factory_test.py,sha256=7ZJRDSQ_cqJFyfqLK375V_wEqgrQpqibzNDZzNbhthU,8635
 official/vision/modeling/backbones/mobiledet.py,sha256=iEC_KbqYqUBBBwZUfRCVtqllQwK6N4T1jmiDl29B-Ys,24896
 official/vision/modeling/backbones/mobiledet_test.py,sha256=O2yfL7MSCGtKsnXr0IVUtjicrhZGGkwTXWCLtqdsL0Y,3804
-official/vision/modeling/backbones/mobilenet.py,sha256=dVABJm7mRizcqwVFiWjulb6aIGgVoa4JuUX5ms_H7II,60978
+official/vision/modeling/backbones/mobilenet.py,sha256=iwUS9WSAZA6cyagw2Ld1zUlyR1MmvA9-AS7Gm4M8HZA,61286
 official/vision/modeling/backbones/mobilenet_test.py,sha256=7cl5eerD5j5UqHL8SLmpou-PjufBz8oz_cn3tqwW1vM,13057
 official/vision/modeling/backbones/resnet.py,sha256=dnYkdlYUzChGLOrQnUbwb9YJ7BDiFwgnLptks7kFb7k,16384
 official/vision/modeling/backbones/resnet_3d.py,sha256=Cq1lrlRqIg9ss_ud1iM_axW9lsTVtGYe3iA4DL9Orzk,18657
@@ -1206,9 +1206,9 @@ tensorflow_models/tensorflow_models_test.py,sha256=nc6A9K53OGqF25xN5St8EiWvdVbda
 tensorflow_models/nlp/__init__.py,sha256=4tA5Pf4qaFwT-fIFOpX7x7FHJpnyJT-5UgOeFYTyMlc,807
 tensorflow_models/uplift/__init__.py,sha256=mqfa55gweOdpKoaQyid4A_4u7xw__FcQeSIF0k_pYmI,999
 tensorflow_models/vision/__init__.py,sha256=zBorY_v5xva1uI-qxhZO3Qh-Dii-Suq6wEYh6hKHDfc,833
-tf_models_nightly-2.17.0.dev20240416.dist-info/AUTHORS,sha256=1dG3fXVu9jlo7bul8xuix5F5vOnczMk7_yWn4y70uw0,337
-tf_models_nightly-2.17.0.dev20240416.dist-info/LICENSE,sha256=WxeBS_DejPZQabxtfMOM_xn8qoZNJDQjrT7z2wG1I4U,11512
-tf_models_nightly-2.17.0.dev20240416.dist-info/METADATA,sha256=OYh0knB0PXfjGoWNi4k_RNY5sOmwpsSjfLlHlzIIIm0,1432
-tf_models_nightly-2.17.0.dev20240416.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
-tf_models_nightly-2.17.0.dev20240416.dist-info/top_level.txt,sha256=gum2FfO5R4cvjl2-QtP-S1aNmsvIZaFFT6VFzU0f4-g,33
-tf_models_nightly-2.17.0.dev20240416.dist-info/RECORD,,
+tf_models_nightly-2.17.0.dev20240418.dist-info/AUTHORS,sha256=1dG3fXVu9jlo7bul8xuix5F5vOnczMk7_yWn4y70uw0,337
+tf_models_nightly-2.17.0.dev20240418.dist-info/LICENSE,sha256=WxeBS_DejPZQabxtfMOM_xn8qoZNJDQjrT7z2wG1I4U,11512
+tf_models_nightly-2.17.0.dev20240418.dist-info/METADATA,sha256=Qga8KYfDdCPyinPp79D7UyN7doPZH066mZ98Wp5QlUM,1432
+tf_models_nightly-2.17.0.dev20240418.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+tf_models_nightly-2.17.0.dev20240418.dist-info/top_level.txt,sha256=gum2FfO5R4cvjl2-QtP-S1aNmsvIZaFFT6VFzU0f4-g,33
+tf_models_nightly-2.17.0.dev20240418.dist-info/RECORD,,