tensorbored 2.21.0rc1769983804__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorbored/__init__.py +112 -0
- tensorbored/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/__init__.py +125 -0
- tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
- tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
- tensorbored/_vendor/bleach/callbacks.py +32 -0
- tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
- tensorbored/_vendor/bleach/linkifier.py +633 -0
- tensorbored/_vendor/bleach/parse_shim.py +1 -0
- tensorbored/_vendor/bleach/sanitizer.py +638 -0
- tensorbored/_vendor/bleach/six_shim.py +19 -0
- tensorbored/_vendor/webencodings/__init__.py +342 -0
- tensorbored/_vendor/webencodings/labels.py +231 -0
- tensorbored/_vendor/webencodings/mklabels.py +59 -0
- tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
- tensorbored/assets.py +36 -0
- tensorbored/auth.py +102 -0
- tensorbored/backend/__init__.py +0 -0
- tensorbored/backend/application.py +604 -0
- tensorbored/backend/auth_context_middleware.py +38 -0
- tensorbored/backend/client_feature_flags.py +113 -0
- tensorbored/backend/empty_path_redirect.py +46 -0
- tensorbored/backend/event_processing/__init__.py +0 -0
- tensorbored/backend/event_processing/data_ingester.py +276 -0
- tensorbored/backend/event_processing/data_provider.py +535 -0
- tensorbored/backend/event_processing/directory_loader.py +142 -0
- tensorbored/backend/event_processing/directory_watcher.py +272 -0
- tensorbored/backend/event_processing/event_accumulator.py +950 -0
- tensorbored/backend/event_processing/event_file_inspector.py +463 -0
- tensorbored/backend/event_processing/event_file_loader.py +292 -0
- tensorbored/backend/event_processing/event_multiplexer.py +521 -0
- tensorbored/backend/event_processing/event_util.py +68 -0
- tensorbored/backend/event_processing/io_wrapper.py +223 -0
- tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
- tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
- tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
- tensorbored/backend/event_processing/reservoir.py +266 -0
- tensorbored/backend/event_processing/tag_types.py +29 -0
- tensorbored/backend/experiment_id.py +71 -0
- tensorbored/backend/experimental_plugin.py +51 -0
- tensorbored/backend/http_util.py +263 -0
- tensorbored/backend/json_util.py +70 -0
- tensorbored/backend/path_prefix.py +67 -0
- tensorbored/backend/process_graph.py +74 -0
- tensorbored/backend/security_validator.py +202 -0
- tensorbored/compat/__init__.py +69 -0
- tensorbored/compat/proto/__init__.py +0 -0
- tensorbored/compat/proto/allocation_description_pb2.py +35 -0
- tensorbored/compat/proto/api_def_pb2.py +82 -0
- tensorbored/compat/proto/attr_value_pb2.py +80 -0
- tensorbored/compat/proto/cluster_pb2.py +58 -0
- tensorbored/compat/proto/config_pb2.py +271 -0
- tensorbored/compat/proto/coordination_config_pb2.py +45 -0
- tensorbored/compat/proto/cost_graph_pb2.py +87 -0
- tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
- tensorbored/compat/proto/debug_pb2.py +65 -0
- tensorbored/compat/proto/event_pb2.py +149 -0
- tensorbored/compat/proto/full_type_pb2.py +74 -0
- tensorbored/compat/proto/function_pb2.py +157 -0
- tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
- tensorbored/compat/proto/graph_pb2.py +41 -0
- tensorbored/compat/proto/histogram_pb2.py +39 -0
- tensorbored/compat/proto/meta_graph_pb2.py +254 -0
- tensorbored/compat/proto/node_def_pb2.py +61 -0
- tensorbored/compat/proto/op_def_pb2.py +81 -0
- tensorbored/compat/proto/resource_handle_pb2.py +48 -0
- tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
- tensorbored/compat/proto/rpc_options_pb2.py +35 -0
- tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
- tensorbored/compat/proto/saver_pb2.py +38 -0
- tensorbored/compat/proto/step_stats_pb2.py +116 -0
- tensorbored/compat/proto/struct_pb2.py +144 -0
- tensorbored/compat/proto/summary_pb2.py +111 -0
- tensorbored/compat/proto/tensor_description_pb2.py +38 -0
- tensorbored/compat/proto/tensor_pb2.py +68 -0
- tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
- tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
- tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
- tensorbored/compat/proto/types_pb2.py +105 -0
- tensorbored/compat/proto/variable_pb2.py +62 -0
- tensorbored/compat/proto/verifier_config_pb2.py +38 -0
- tensorbored/compat/proto/versions_pb2.py +35 -0
- tensorbored/compat/tensorflow_stub/__init__.py +38 -0
- tensorbored/compat/tensorflow_stub/app.py +124 -0
- tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
- tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
- tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
- tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
- tensorbored/compat/tensorflow_stub/errors.py +507 -0
- tensorbored/compat/tensorflow_stub/flags.py +124 -0
- tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
- tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
- tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
- tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
- tensorbored/context.py +129 -0
- tensorbored/data/__init__.py +0 -0
- tensorbored/data/grpc_provider.py +365 -0
- tensorbored/data/ingester.py +46 -0
- tensorbored/data/proto/__init__.py +0 -0
- tensorbored/data/proto/data_provider_pb2.py +517 -0
- tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
- tensorbored/data/provider.py +1365 -0
- tensorbored/data/server_ingester.py +301 -0
- tensorbored/data_compat.py +159 -0
- tensorbored/dataclass_compat.py +224 -0
- tensorbored/default.py +124 -0
- tensorbored/errors.py +130 -0
- tensorbored/lazy.py +99 -0
- tensorbored/main.py +48 -0
- tensorbored/main_lib.py +62 -0
- tensorbored/manager.py +487 -0
- tensorbored/notebook.py +441 -0
- tensorbored/plugin_util.py +266 -0
- tensorbored/plugins/__init__.py +0 -0
- tensorbored/plugins/audio/__init__.py +0 -0
- tensorbored/plugins/audio/audio_plugin.py +229 -0
- tensorbored/plugins/audio/metadata.py +69 -0
- tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
- tensorbored/plugins/audio/summary.py +230 -0
- tensorbored/plugins/audio/summary_v2.py +124 -0
- tensorbored/plugins/base_plugin.py +367 -0
- tensorbored/plugins/core/__init__.py +0 -0
- tensorbored/plugins/core/core_plugin.py +981 -0
- tensorbored/plugins/custom_scalar/__init__.py +0 -0
- tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
- tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
- tensorbored/plugins/custom_scalar/metadata.py +35 -0
- tensorbored/plugins/custom_scalar/summary.py +79 -0
- tensorbored/plugins/debugger_v2/__init__.py +0 -0
- tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
- tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
- tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
- tensorbored/plugins/distribution/__init__.py +0 -0
- tensorbored/plugins/distribution/compressor.py +158 -0
- tensorbored/plugins/distribution/distributions_plugin.py +116 -0
- tensorbored/plugins/distribution/metadata.py +19 -0
- tensorbored/plugins/graph/__init__.py +0 -0
- tensorbored/plugins/graph/graph_util.py +129 -0
- tensorbored/plugins/graph/graphs_plugin.py +336 -0
- tensorbored/plugins/graph/keras_util.py +328 -0
- tensorbored/plugins/graph/metadata.py +42 -0
- tensorbored/plugins/histogram/__init__.py +0 -0
- tensorbored/plugins/histogram/histograms_plugin.py +144 -0
- tensorbored/plugins/histogram/metadata.py +63 -0
- tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
- tensorbored/plugins/histogram/summary.py +234 -0
- tensorbored/plugins/histogram/summary_v2.py +292 -0
- tensorbored/plugins/hparams/__init__.py +14 -0
- tensorbored/plugins/hparams/_keras.py +93 -0
- tensorbored/plugins/hparams/api.py +130 -0
- tensorbored/plugins/hparams/api_pb2.py +208 -0
- tensorbored/plugins/hparams/backend_context.py +606 -0
- tensorbored/plugins/hparams/download_data.py +158 -0
- tensorbored/plugins/hparams/error.py +26 -0
- tensorbored/plugins/hparams/get_experiment.py +71 -0
- tensorbored/plugins/hparams/hparams_plugin.py +206 -0
- tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
- tensorbored/plugins/hparams/json_format_compat.py +38 -0
- tensorbored/plugins/hparams/list_metric_evals.py +57 -0
- tensorbored/plugins/hparams/list_session_groups.py +1040 -0
- tensorbored/plugins/hparams/metadata.py +125 -0
- tensorbored/plugins/hparams/metrics.py +41 -0
- tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
- tensorbored/plugins/hparams/summary.py +205 -0
- tensorbored/plugins/hparams/summary_v2.py +597 -0
- tensorbored/plugins/image/__init__.py +0 -0
- tensorbored/plugins/image/images_plugin.py +232 -0
- tensorbored/plugins/image/metadata.py +65 -0
- tensorbored/plugins/image/plugin_data_pb2.py +34 -0
- tensorbored/plugins/image/summary.py +159 -0
- tensorbored/plugins/image/summary_v2.py +130 -0
- tensorbored/plugins/mesh/__init__.py +14 -0
- tensorbored/plugins/mesh/mesh_plugin.py +292 -0
- tensorbored/plugins/mesh/metadata.py +152 -0
- tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
- tensorbored/plugins/mesh/summary.py +251 -0
- tensorbored/plugins/mesh/summary_v2.py +214 -0
- tensorbored/plugins/metrics/__init__.py +0 -0
- tensorbored/plugins/metrics/metadata.py +17 -0
- tensorbored/plugins/metrics/metrics_plugin.py +623 -0
- tensorbored/plugins/pr_curve/__init__.py +0 -0
- tensorbored/plugins/pr_curve/metadata.py +75 -0
- tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
- tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
- tensorbored/plugins/pr_curve/summary.py +574 -0
- tensorbored/plugins/profile_redirect/__init__.py +0 -0
- tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
- tensorbored/plugins/projector/__init__.py +67 -0
- tensorbored/plugins/projector/metadata.py +26 -0
- tensorbored/plugins/projector/projector_config_pb2.py +54 -0
- tensorbored/plugins/projector/projector_plugin.py +795 -0
- tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
- tensorbored/plugins/scalar/__init__.py +0 -0
- tensorbored/plugins/scalar/metadata.py +60 -0
- tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
- tensorbored/plugins/scalar/scalars_plugin.py +181 -0
- tensorbored/plugins/scalar/summary.py +109 -0
- tensorbored/plugins/scalar/summary_v2.py +124 -0
- tensorbored/plugins/text/__init__.py +0 -0
- tensorbored/plugins/text/metadata.py +62 -0
- tensorbored/plugins/text/plugin_data_pb2.py +34 -0
- tensorbored/plugins/text/summary.py +114 -0
- tensorbored/plugins/text/summary_v2.py +124 -0
- tensorbored/plugins/text/text_plugin.py +288 -0
- tensorbored/plugins/wit_redirect/__init__.py +0 -0
- tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
- tensorbored/program.py +910 -0
- tensorbored/summary/__init__.py +35 -0
- tensorbored/summary/_output.py +124 -0
- tensorbored/summary/_tf/__init__.py +14 -0
- tensorbored/summary/_tf/summary/__init__.py +178 -0
- tensorbored/summary/_writer.py +105 -0
- tensorbored/summary/v1.py +51 -0
- tensorbored/summary/v2.py +25 -0
- tensorbored/summary/writer/__init__.py +13 -0
- tensorbored/summary/writer/event_file_writer.py +291 -0
- tensorbored/summary/writer/record_writer.py +50 -0
- tensorbored/util/__init__.py +0 -0
- tensorbored/util/encoder.py +116 -0
- tensorbored/util/grpc_util.py +311 -0
- tensorbored/util/img_mime_type_detector.py +40 -0
- tensorbored/util/io_util.py +20 -0
- tensorbored/util/lazy_tensor_creator.py +110 -0
- tensorbored/util/op_evaluator.py +104 -0
- tensorbored/util/platform_util.py +20 -0
- tensorbored/util/tb_logging.py +24 -0
- tensorbored/util/tensor_util.py +617 -0
- tensorbored/util/timing.py +122 -0
- tensorbored/version.py +21 -0
- tensorbored/webfiles.zip +0 -0
- tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
- tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
- tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
- tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
- tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
- tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,301 @@
|
|
|
1
|
+
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ==============================================================================
|
|
15
|
+
"""Provides data ingestion logic backed by a gRPC server."""
|
|
16
|
+
|
|
17
|
+
import errno
|
|
18
|
+
import logging
|
|
19
|
+
import os
|
|
20
|
+
import subprocess
|
|
21
|
+
import tempfile
|
|
22
|
+
import time
|
|
23
|
+
|
|
24
|
+
import grpc
|
|
25
|
+
import pkg_resources
|
|
26
|
+
|
|
27
|
+
from tensorbored.data import grpc_provider
|
|
28
|
+
from tensorbored.data import ingester
|
|
29
|
+
from tensorbored.data.proto import data_provider_pb2
|
|
30
|
+
from tensorbored.util import tb_logging
|
|
31
|
+
|
|
32
|
+
logger = tb_logging.get_logger()
|
|
33
|
+
|
|
34
|
+
# If this environment variable is non-empty, it will be used as the path to the
|
|
35
|
+
# data server binary rather than using a bundled version.
|
|
36
|
+
_ENV_DATA_SERVER_BINARY = "TENSORBOARD_DATA_SERVER_BINARY"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class ExistingServerDataIngester(ingester.DataIngester):
    """Data ingester backed by a gRPC data server that is already running."""

    def __init__(self, address, *, channel_creds_type):
        """Creates an ingester pointed at an externally managed server.

        Args:
          address: String, as passed to `--grpc_data_provider`.
          channel_creds_type: `grpc_util.ChannelCredsType`, as passed to
            `--grpc_creds_type`.
        """
        # Open the channel immediately; there is no separate launch step
        # for a server that somebody else is running.
        self._data_provider = grpc_provider.GrpcDataProvider(
            address, _make_stub(address, channel_creds_type)
        )

    @property
    def data_provider(self):
        return self._data_provider

    def start(self):
        # Nothing to do: the server's lifecycle is managed externally.
        pass
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class SubprocessServerDataIngester(ingester.DataIngester):
    """Start a new data server as a subprocess."""

    def __init__(
        self,
        server_binary,
        logdir,
        *,
        reload_interval,
        channel_creds_type,
        samples_per_plugin=None,
        extra_flags=None,
    ):
        """Initializes an ingester with the given configuration.

        Args:
          server_binary: `ServerBinary` to launch.
          logdir: String, as passed to `--logdir`.
          reload_interval: Number, as passed to `--reload_interval`.
          channel_creds_type: `grpc_util.ChannelCredsType`, as passed to
            `--grpc_creds_type`.
          samples_per_plugin: Dict[String, Int], as parsed from
            `--samples_per_plugin`.
          extra_flags: List of extra string flags to be passed to the
            data server without further interpretation.
        """
        self._server_binary = server_binary
        # Populated by `start`; `None` until the server is reachable.
        self._data_provider = None
        self._logdir = logdir
        self._reload_interval = reload_interval
        self._channel_creds_type = channel_creds_type
        self._samples_per_plugin = samples_per_plugin or {}
        self._extra_flags = list(extra_flags or [])

    @property
    def data_provider(self):
        if self._data_provider is None:
            raise RuntimeError("Must call `start` first")
        return self._data_provider

    def start(self):
        """Launches the data server and blocks until it is reachable.

        Idempotent: a second call after a successful start is a no-op.

        Raises:
          DataServerStartupError: If the server process exits during
            startup, fails to report its port within the polling window,
            or does not answer an initial RPC.
        """
        if self._data_provider:
            return

        # Scratch directory for the server to report its port (and, on
        # newer servers, any startup error) back to us.
        tmpdir = tempfile.TemporaryDirectory(prefix="tensorboard_data_server_")
        port_file_path = os.path.join(tmpdir.name, "port")
        error_file_path = os.path.join(tmpdir.name, "startup_error")

        if self._reload_interval <= 0:
            reload = "once"
        else:
            reload = str(int(self._reload_interval))

        # A sample count of 0 means "keep all samples" to the server.
        sample_hint_pairs = [
            "%s=%s" % (k, "all" if v == 0 else v)
            for k, v in self._samples_per_plugin.items()
        ]
        samples_per_plugin = ",".join(sample_hint_pairs)

        args = [
            self._server_binary.path,
            "--logdir=%s" % os.path.expanduser(self._logdir),
            "--reload=%s" % reload,
            "--samples-per-plugin=%s" % samples_per_plugin,
            "--port=0",
            "--port-file=%s" % (port_file_path,),
            "--die-after-stdin",
        ]
        if self._server_binary.at_least_version("0.5.0a0"):
            # `--error-file` only exists on newer server binaries.
            args.append("--error-file=%s" % (error_file_path,))
        if logger.isEnabledFor(logging.INFO):
            args.append("--verbose")
        if logger.isEnabledFor(logging.DEBUG):
            args.append("--verbose")  # Repeat arg to increase verbosity.
        args.extend(self._extra_flags)

        logger.info("Spawning data server: %r", args)
        popen = subprocess.Popen(args, stdin=subprocess.PIPE)
        # Stash stdin to avoid calling its destructor: on Windows, this
        # is a `subprocess.Handle` that closes itself in `__del__`,
        # which would cause the data server to shut down. (This is not
        # documented; you have to read CPython source to figure it out.)
        # We want that to happen at end of process, but not before.
        self._stdin_handle = popen.stdin  # stash to avoid stdin being closed

        port = None
        # The server only needs about 10 microseconds to spawn on my machine,
        # but give a few orders of magnitude of padding, and then poll.
        time.sleep(0.01)
        for i in range(20):
            if popen.poll() is not None:
                # Process died; surface its reported error if available.
                msg = (_maybe_read_file(error_file_path) or "").strip()
                if not msg:
                    msg = (
                        "exited with %d; check stderr for details"
                        % popen.poll()
                    )
                raise DataServerStartupError(msg)
            logger.info("Polling for data server port (attempt %d)", i)
            port_file_contents = _maybe_read_file(port_file_path)
            logger.info("Port file contents: %r", port_file_contents)
            # A trailing newline signals that the server finished writing.
            if (port_file_contents or "").endswith("\n"):
                port = int(port_file_contents)
                break
            # Else, not done writing yet.
            time.sleep(0.5)
        if port is None:
            raise DataServerStartupError(
                "Timed out while waiting for data server to start. "
                "It may still be running as pid %d." % popen.pid
            )

        addr = "localhost:%d" % port
        stub = _make_stub(addr, self._channel_creds_type)
        logger.info(
            "Opened channel to data server at pid %d via %s",
            popen.pid,
            addr,
        )

        # Smoke-test the connection with a cheap RPC before declaring success.
        req = data_provider_pb2.GetExperimentRequest()
        try:
            stub.GetExperiment(req, timeout=5)  # should be near-instant
        except grpc.RpcError as e:
            msg = "Failed to communicate with data server at %s: %s" % (addr, e)
            # Fix: use the module-level `logger` (tb_logging), not the root
            # `logging` module, so this warning honors TensorBoard's logger
            # configuration like every other log call in this file.
            logger.warning("%s", msg)
            raise DataServerStartupError(msg) from e
        logger.info("Got valid response from data server")
        self._data_provider = grpc_provider.GrpcDataProvider(addr, stub)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _maybe_read_file(path):
|
|
193
|
+
"""Read a file, or return `None` on ENOENT specifically."""
|
|
194
|
+
try:
|
|
195
|
+
with open(path) as infile:
|
|
196
|
+
return infile.read()
|
|
197
|
+
except OSError as e:
|
|
198
|
+
if e.errno == errno.ENOENT:
|
|
199
|
+
return None
|
|
200
|
+
raise
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def _make_stub(addr, channel_creds_type):
    """Opens a secure gRPC channel to `addr` and returns a provider stub."""
    creds, options = channel_creds_type.channel_config()
    # Raise the receive ceiling to 256 MiB to accommodate bulk data reads.
    max_message_bytes = 1024 * 1024 * 256
    options.append(("grpc.max_receive_message_length", max_message_bytes))
    return grpc_provider.make_stub(
        grpc.secure_channel(addr, creds, options=options)
    )
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
class NoDataServerError(RuntimeError):
    """Raised when no data server binary can be located."""
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class DataServerStartupError(RuntimeError):
    """Raised when a data server subprocess fails to start or respond."""
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
class ServerBinary:
    """Information about a data server binary."""

    def __init__(self, path, version):
        """Initializes a `ServerBinary`.

        Args:
          path: String path to executable on disk.
          version: PEP 396-compliant version string, or `None` if
            unknown or not applicable. Binaries at unknown versions are
            assumed to be bleeding-edge: if you bring your own binary,
            it's on you to make sure that it's up to date.
        """
        self._path = path
        if version is None:
            # Unknown version: treated as newer than everything.
            self._version = None
        else:
            self._version = pkg_resources.parse_version(version)

    @property
    def path(self):
        """String path to the executable on disk."""
        return self._path

    def at_least_version(self, required_version):
        """Test whether the binary's version is at least the given one.

        Useful for gating features that are available in the latest data
        server builds from head, but not yet released to PyPI. For
        example, if v0.4.0 is the latest published version, you can
        check `at_least_version("0.5.0a0")` to include both prereleases
        at head and the eventual final release of v0.5.0.

        If this binary's version was set to `None` at construction time,
        this method always returns `True`.

        Args:
          required_version: PEP 396-compliant version string.

        Returns:
          Boolean.
        """
        current = self._version
        if current is None:
            return True
        return current >= pkg_resources.parse_version(required_version)
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def get_server_binary():
    """Get `ServerBinary` info or raise `NoDataServerError`.

    Resolution order:
      1. The path in the `TENSORBOARD_DATA_SERVER_BINARY` environment
         variable, if set (must exist on disk).
      2. A binary bundled next to this module at `server/server`.
      3. The `tensorboard_data_server` Python package, if importable.

    Returns:
      A `ServerBinary` for the first source that yields a binary.

    Raises:
      NoDataServerError: If no binary can be located, or if the
        environment variable points at a nonexistent file, or if the
        Python package reports no binary for this platform.
    """
    # Fix: use the module-level `logger` (tb_logging) rather than the
    # root `logging` module, matching the rest of this file.
    env_result = os.environ.get(_ENV_DATA_SERVER_BINARY)
    if env_result:
        logger.info("Server binary (from env): %s", env_result)
        if not os.path.isfile(env_result):
            raise NoDataServerError(
                "Found environment variable %s=%s, but no such file exists."
                % (_ENV_DATA_SERVER_BINARY, env_result)
            )
        return ServerBinary(env_result, version=None)

    bundle_result = os.path.join(os.path.dirname(__file__), "server", "server")
    if os.path.exists(bundle_result):
        logger.info("Server binary (from bundle): %s", bundle_result)
        return ServerBinary(bundle_result, version=None)

    try:
        import tensorboard_data_server
    except ImportError:
        pass
    else:
        pkg_result = tensorboard_data_server.server_binary()
        version = tensorboard_data_server.__version__
        logger.info(
            "Server binary (from Python package v%s): %s", version, pkg_result
        )
        if pkg_result is None:
            # The package exists but ships no binary for this platform.
            raise NoDataServerError(
                "TensorBoard data server not supported on this platform."
            )
        return ServerBinary(pkg_result, version)

    raise NoDataServerError(
        "TensorBoard data server not found. This mode is experimental. "
        "If building from source, pass --define=link_data_server=true."
    )
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ==============================================================================
|
|
15
|
+
"""Utilities to migrate legacy protos to their modern equivalents."""
|
|
16
|
+
|
|
17
|
+
import numpy as np
|
|
18
|
+
|
|
19
|
+
from tensorbored.compat.proto import event_pb2
|
|
20
|
+
from tensorbored.compat.proto import summary_pb2
|
|
21
|
+
from tensorbored.plugins.audio import metadata as audio_metadata
|
|
22
|
+
from tensorbored.plugins.histogram import metadata as histogram_metadata
|
|
23
|
+
from tensorbored.plugins.image import metadata as image_metadata
|
|
24
|
+
from tensorbored.plugins.scalar import metadata as scalar_metadata
|
|
25
|
+
from tensorbored.util import tensor_util
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def migrate_event(event):
    """Returns `event` with any old-style summary values migrated.

    If nothing needed migration, the original event object is returned
    unchanged; otherwise a new `Event` proto is built.
    """
    if not event.HasField("summary"):
        return event
    originals = event.summary.value
    migrated = [migrate_value(v) for v in originals]
    # Optimization: reuse the input event when every value came back
    # as the identical object (i.e., no migration happened).
    unchanged = len(originals) == len(migrated) and all(
        new is old for (old, new) in zip(originals, migrated)
    )
    if unchanged:
        return event
    result = event_pb2.Event()
    result.CopyFrom(event)
    del result.summary.value[:]
    result.summary.value.extend(migrated)
    return result
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def migrate_value(value):
    """Convert `value` to a new-style value, if necessary and possible.

    An "old-style" value is a value that uses any `value` field other than
    the `tensor` field. A "new-style" value is a value that uses the
    `tensor` field. TensorBoard continues to support old-style values on
    disk; this method converts them to new-style values so that further
    code need only deal with one data format.

    Arguments:
      value: A `Summary.Value` object. This argument is not modified.

    Returns:
      If the `value` is an old-style value for which there is a new-style
      equivalent, the result is the new-style value. Otherwise---if the
      value is already new-style or does not yet have a new-style
      equivalent---the value will be returned unchanged.

    :type value: Summary.Value
    :rtype: Summary.Value
    """
    # Dispatch on which oneof field is set; anything without a handler
    # (including already-new-style `tensor` values) passes through.
    handlers = {
        "histo": _migrate_histogram_value,
        "image": _migrate_image_value,
        "audio": _migrate_audio_value,
        "simple_value": _migrate_scalar_value,
    }
    handler = handlers.get(value.WhichOneof("value"))
    if handler is None:
        return value
    return handler(value)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def make_summary(tag, metadata, data):
    """Builds a new-style `Summary.Value` wrapping `data` as a tensor.

    Args:
      tag: String tag for the summary value.
      metadata: A `SummaryMetadata` proto to attach to the value.
      data: Data accepted by `tensor_util.make_tensor_proto` (e.g. a
        numpy array or compatible scalar/sequence).

    Returns:
      A `Summary.Value` proto with its `tensor` field populated.
    """
    tensor_proto = tensor_util.make_tensor_proto(data)
    return summary_pb2.Summary.Value(
        tag=tag, metadata=metadata, tensor=tensor_proto
    )
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _migrate_histogram_value(value):
    """Convert `old-style` histogram value to `new-style`.

    The "old-style" format can have outermost bucket limits of -DBL_MAX and
    DBL_MAX, which are problematic for visualization. We replace those here
    with the actual min and max values seen in the input data, but then in
    order to avoid introducing "backwards" buckets (where left edge > right
    edge), we first must drop all empty buckets on the left and right ends.
    """
    histo = value.histo
    counts = histo.bucket
    # Indices of every non-empty bucket, in ascending order.
    occupied = [i for (i, count) in enumerate(counts) if count > 0]
    if not occupied:
        # All input buckets were empty; treat it as a zero-bucket
        # new-style histogram.
        buckets = np.zeros([0, 3], dtype=np.float32)
    else:
        first = occupied[0]
        last = occupied[-1]
        # Discard empty buckets on both ends, and keep only the "inner"
        # edges from the remaining buckets. Bucket indices run from
        # `first` to `last` inclusive, but bucket_limit indices are
        # exclusive of `last` - this is because bucket_limit[i] is the
        # right-hand edge for bucket[i].
        kept_counts = counts[first : last + 1]
        inner_edges = histo.bucket_limit[first:last]
        # Use min as the left-hand limit for the first non-empty bucket
        # and max as the right-hand limit for the last non-empty bucket.
        lefts = [histo.min] + inner_edges
        rights = inner_edges + [histo.max]
        buckets = np.array(
            [lefts, rights, kept_counts], dtype=np.float32
        ).transpose()

    summary_metadata = histogram_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )

    return make_summary(value.tag, summary_metadata, buckets)
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _migrate_image_value(value):
    """Convert an old-style image value to a new-style tensor value.

    The resulting tensor holds three byte strings: the stringified width,
    the stringified height, and the encoded image data.
    """
    image = value.image
    width = str(image.width).encode("ascii")
    height = str(image.height).encode("ascii")
    data = [width, height, image.encoded_image_string]

    summary_metadata = image_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, data)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _migrate_audio_value(value):
    """Convert an old-style audio value to a new-style tensor value.

    The resulting tensor holds a single (encoded_audio, label) row, with
    the label left empty.
    """
    row = [value.audio.encoded_audio_string, b""]  # empty label
    summary_metadata = audio_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        encoding=audio_metadata.Encoding.Value("WAV"),
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, [row])
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def _migrate_scalar_value(value):
    """Convert an old-style `simple_value` to a new-style tensor value."""
    summary_metadata = scalar_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )
    return make_summary(value.tag, summary_metadata, value.simple_value)
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ==============================================================================
|
|
15
|
+
"""Utilities to migrate legacy summaries/events to generic data form.
|
|
16
|
+
|
|
17
|
+
For legacy summaries, this populates the `SummaryMetadata.data_class`
|
|
18
|
+
field and makes any necessary transformations to the tensor value. For
|
|
19
|
+
`graph_def` events, this creates a new summary event.
|
|
20
|
+
|
|
21
|
+
This should be effected after the `data_compat` transformation.
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
from tensorbored.compat.proto import event_pb2
|
|
25
|
+
from tensorbored.compat.proto import summary_pb2
|
|
26
|
+
from tensorbored.plugins.audio import metadata as audio_metadata
|
|
27
|
+
from tensorbored.plugins.custom_scalar import (
|
|
28
|
+
metadata as custom_scalars_metadata,
|
|
29
|
+
)
|
|
30
|
+
from tensorbored.plugins.graph import metadata as graphs_metadata
|
|
31
|
+
from tensorbored.plugins.histogram import metadata as histograms_metadata
|
|
32
|
+
from tensorbored.plugins.hparams import metadata as hparams_metadata
|
|
33
|
+
from tensorbored.plugins.image import metadata as images_metadata
|
|
34
|
+
from tensorbored.plugins.mesh import metadata as mesh_metadata
|
|
35
|
+
from tensorbored.plugins.pr_curve import metadata as pr_curves_metadata
|
|
36
|
+
from tensorbored.plugins.scalar import metadata as scalars_metadata
|
|
37
|
+
from tensorbored.plugins.text import metadata as text_metadata
|
|
38
|
+
from tensorbored.util import tensor_util
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def migrate_event(event, initial_metadata):
    """Migrate an event to a sequence of events.

    Args:
      event: An `event_pb2.Event`. The caller transfers ownership of the
        event to this method; the event may be mutated, and may or may
        not appear in the returned sequence.
      initial_metadata: Map from tag name (string) to `SummaryMetadata`
        proto for the initial occurrence of the given tag within the
        enclosing run. While loading a given run, the caller should
        always pass the same dictionary here, initially `{}`; this
        function will mutate it and reuse it for future calls.

    Returns:
      A sequence of `event_pb2.Event`s to use instead of `event`.
    """
    # The `what` oneof determines which migration applies; anything else
    # passes through untouched.
    what = event.WhichOneof("what")
    if what == "summary":
        return _migrate_summary_event(event, initial_metadata)
    if what == "graph_def":
        return _migrate_graph_event(event)
    if what == "tagged_run_metadata":
        return _migrate_tagged_run_metadata_event(event)
    return (event,)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _migrate_graph_event(old_event):
    """Rewrite a `graph_def` event as a blob-sequence summary event.

    Returns both the original event and the new one: as long as the graphs
    plugin still reads the old format, keeping both maintains compatibility.
    """
    new_event = event_pb2.Event()
    new_event.wall_time = old_event.wall_time
    new_event.step = old_event.step
    new_value = new_event.summary.value.add(
        tag=graphs_metadata.RUN_GRAPH_NAME
    )
    new_value.tensor.CopyFrom(
        tensor_util.make_tensor_proto([old_event.graph_def])
    )
    new_value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
    # `new_value.metadata.plugin_data.content` left empty
    new_value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (old_event, new_event)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _migrate_tagged_run_metadata_event(old_event):
    """Rewrite a `tagged_run_metadata` event as a blob-sequence summary.

    Unlike the graph migration, the old event is dropped entirely.
    """
    new_event = event_pb2.Event()
    new_event.wall_time = old_event.wall_time
    new_event.step = old_event.step
    tagged = old_event.tagged_run_metadata
    new_value = new_event.summary.value.add(tag=tagged.tag)
    new_value.tensor.CopyFrom(
        tensor_util.make_tensor_proto([tagged.run_metadata])
    )
    new_value.metadata.plugin_data.plugin_name = (
        graphs_metadata.PLUGIN_NAME_TAGGED_RUN_METADATA
    )
    # `new_value.metadata.plugin_data.content` left empty
    new_value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (new_event,)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _migrate_summary_event(event, initial_metadata):
    """Migrate every value within a summary event. May mutate `event`."""
    old_values = event.summary.value
    migrated = [
        v for old in old_values for v in _migrate_value(old, initial_metadata)
    ]
    # Optimization: Don't rewrite the event's value list if there were no
    # shallow changes (there may still have been in-place changes).
    unchanged = len(old_values) == len(migrated) and all(
        old is new for (old, new) in zip(old_values, migrated)
    )
    if not unchanged:
        del event.summary.value[:]
        event.summary.value.extend(migrated)
    return (event,)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _migrate_value(value, initial_metadata):
    """Convert an old value to a stream of new values. May mutate.

    Args:
      value: A `Summary.Value` proto; the caller transfers ownership, and
        the value may be mutated in place.
      initial_metadata: Map from tag name (string) to `SummaryMetadata`
        proto for the first occurrence of each tag in the run; this
        function records `value.tag` here on first sight and consults it
        on later calls (later events in a series usually omit metadata).

    Returns:
      A sequence of `Summary.Value` protos to use instead of `value`.
    """
    metadata = initial_metadata.get(value.tag)
    if metadata is None:
        # Retain a copy of the initial metadata, so that even after we
        # update its data class we know whether to also transform later
        # events in this time series.
        # (Bug fix: the original also set an `initial` flag here that was
        # never read; that dead local has been removed.)
        metadata = summary_pb2.SummaryMetadata()
        metadata.CopyFrom(value.metadata)
        initial_metadata[value.tag] = metadata
    if metadata.data_class != summary_pb2.DATA_CLASS_UNKNOWN:
        # Already new-style; nothing to do.
        return (value,)
    plugin_name = metadata.plugin_data.plugin_name
    if plugin_name == histograms_metadata.PLUGIN_NAME:
        return _migrate_histogram_value(value)
    if plugin_name == images_metadata.PLUGIN_NAME:
        return _migrate_image_value(value)
    if plugin_name == audio_metadata.PLUGIN_NAME:
        return _migrate_audio_value(value)
    if plugin_name == scalars_metadata.PLUGIN_NAME:
        return _migrate_scalar_value(value)
    if plugin_name == text_metadata.PLUGIN_NAME:
        return _migrate_text_value(value)
    if plugin_name == hparams_metadata.PLUGIN_NAME:
        return _migrate_hparams_value(value)
    if plugin_name == pr_curves_metadata.PLUGIN_NAME:
        return _migrate_pr_curve_value(value)
    if plugin_name == mesh_metadata.PLUGIN_NAME:
        return _migrate_mesh_value(value)
    if plugin_name == custom_scalars_metadata.PLUGIN_NAME:
        return _migrate_custom_scalars_value(value)
    if plugin_name in [
        graphs_metadata.PLUGIN_NAME_RUN_METADATA,
        graphs_metadata.PLUGIN_NAME_RUN_METADATA_WITH_GRAPH,
        graphs_metadata.PLUGIN_NAME_KERAS_MODEL,
    ]:
        return _migrate_graph_sub_plugin_value(value)
    # Unknown plugin: pass the value through untouched.
    return (value,)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _migrate_scalar_value(value):
    """Tag a scalar-plugin value with `DATA_CLASS_SCALAR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
    return (value,)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def _migrate_histogram_value(value):
    """Tag a histogram-plugin value with `DATA_CLASS_TENSOR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _migrate_image_value(value):
    """Tag an image-plugin value with `DATA_CLASS_BLOB_SEQUENCE`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (value,)
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _migrate_text_value(value):
    """Tag a text-plugin value with `DATA_CLASS_TENSOR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def _migrate_audio_value(value):
    """Tag an audio value as a blob sequence, keeping only first-axis clips.

    May mutate `value` in place: trailing tensor dimensions are dropped
    and the string values are strided so that one entry per first-axis
    index (the actual audio clips) remains.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    dims = value.tensor.tensor_shape.dim
    # Project out just the first axis: collapsing the trailing axes gives
    # the stride between consecutive clips in the flat string_val list.
    stride = 1
    while len(dims) > 1:
        stride *= dims.pop().size
    if stride != 1:
        value.tensor.string_val[:] = value.tensor.string_val[::stride]
    return (value,)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _migrate_hparams_value(value):
    """Tag an hparams value as tensor data, filling a null tensor if absent.

    May mutate `value` in place.
    """
    if not value.HasField("tensor"):
        # Supply the hparams plugin's placeholder tensor when none is set.
        value.tensor.CopyFrom(hparams_metadata.NULL_TENSOR)
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def _migrate_pr_curve_value(value):
    """Tag a PR-curve value with `DATA_CLASS_TENSOR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def _migrate_mesh_value(value):
    """Tag a mesh-plugin value with `DATA_CLASS_TENSOR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def _migrate_custom_scalars_value(value):
    """Tag a custom-scalars value with `DATA_CLASS_TENSOR`. May mutate."""
    if not value.HasField("metadata"):
        return (value,)
    value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def _migrate_graph_sub_plugin_value(value):
    """Tag a graph sub-plugin value as a blob sequence. May mutate.

    Also ensures the tensor shape has at least one dimension, adding a
    size-1 dimension when the shape is empty.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    dims = value.tensor.tensor_shape.dim
    if len(dims) == 0:
        dims.add(size=1)
    return (value,)
|