tensorbored 2.21.0rc1769983804__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (271) hide show
  1. tensorbored/__init__.py +112 -0
  2. tensorbored/_vendor/__init__.py +0 -0
  3. tensorbored/_vendor/bleach/__init__.py +125 -0
  4. tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
  5. tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
  6. tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
  7. tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
  8. tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
  9. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
  10. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
  11. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
  12. tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
  13. tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
  14. tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
  15. tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
  16. tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
  17. tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
  18. tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
  19. tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
  20. tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
  21. tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
  22. tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
  23. tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
  24. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
  25. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
  26. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
  27. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
  28. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
  29. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
  30. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
  31. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
  32. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
  33. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
  34. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
  35. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
  36. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
  37. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
  38. tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
  39. tensorbored/_vendor/bleach/callbacks.py +32 -0
  40. tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
  41. tensorbored/_vendor/bleach/linkifier.py +633 -0
  42. tensorbored/_vendor/bleach/parse_shim.py +1 -0
  43. tensorbored/_vendor/bleach/sanitizer.py +638 -0
  44. tensorbored/_vendor/bleach/six_shim.py +19 -0
  45. tensorbored/_vendor/webencodings/__init__.py +342 -0
  46. tensorbored/_vendor/webencodings/labels.py +231 -0
  47. tensorbored/_vendor/webencodings/mklabels.py +59 -0
  48. tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
  49. tensorbored/assets.py +36 -0
  50. tensorbored/auth.py +102 -0
  51. tensorbored/backend/__init__.py +0 -0
  52. tensorbored/backend/application.py +604 -0
  53. tensorbored/backend/auth_context_middleware.py +38 -0
  54. tensorbored/backend/client_feature_flags.py +113 -0
  55. tensorbored/backend/empty_path_redirect.py +46 -0
  56. tensorbored/backend/event_processing/__init__.py +0 -0
  57. tensorbored/backend/event_processing/data_ingester.py +276 -0
  58. tensorbored/backend/event_processing/data_provider.py +535 -0
  59. tensorbored/backend/event_processing/directory_loader.py +142 -0
  60. tensorbored/backend/event_processing/directory_watcher.py +272 -0
  61. tensorbored/backend/event_processing/event_accumulator.py +950 -0
  62. tensorbored/backend/event_processing/event_file_inspector.py +463 -0
  63. tensorbored/backend/event_processing/event_file_loader.py +292 -0
  64. tensorbored/backend/event_processing/event_multiplexer.py +521 -0
  65. tensorbored/backend/event_processing/event_util.py +68 -0
  66. tensorbored/backend/event_processing/io_wrapper.py +223 -0
  67. tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
  68. tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
  69. tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
  70. tensorbored/backend/event_processing/reservoir.py +266 -0
  71. tensorbored/backend/event_processing/tag_types.py +29 -0
  72. tensorbored/backend/experiment_id.py +71 -0
  73. tensorbored/backend/experimental_plugin.py +51 -0
  74. tensorbored/backend/http_util.py +263 -0
  75. tensorbored/backend/json_util.py +70 -0
  76. tensorbored/backend/path_prefix.py +67 -0
  77. tensorbored/backend/process_graph.py +74 -0
  78. tensorbored/backend/security_validator.py +202 -0
  79. tensorbored/compat/__init__.py +69 -0
  80. tensorbored/compat/proto/__init__.py +0 -0
  81. tensorbored/compat/proto/allocation_description_pb2.py +35 -0
  82. tensorbored/compat/proto/api_def_pb2.py +82 -0
  83. tensorbored/compat/proto/attr_value_pb2.py +80 -0
  84. tensorbored/compat/proto/cluster_pb2.py +58 -0
  85. tensorbored/compat/proto/config_pb2.py +271 -0
  86. tensorbored/compat/proto/coordination_config_pb2.py +45 -0
  87. tensorbored/compat/proto/cost_graph_pb2.py +87 -0
  88. tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
  89. tensorbored/compat/proto/debug_pb2.py +65 -0
  90. tensorbored/compat/proto/event_pb2.py +149 -0
  91. tensorbored/compat/proto/full_type_pb2.py +74 -0
  92. tensorbored/compat/proto/function_pb2.py +157 -0
  93. tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
  94. tensorbored/compat/proto/graph_pb2.py +41 -0
  95. tensorbored/compat/proto/histogram_pb2.py +39 -0
  96. tensorbored/compat/proto/meta_graph_pb2.py +254 -0
  97. tensorbored/compat/proto/node_def_pb2.py +61 -0
  98. tensorbored/compat/proto/op_def_pb2.py +81 -0
  99. tensorbored/compat/proto/resource_handle_pb2.py +48 -0
  100. tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
  101. tensorbored/compat/proto/rpc_options_pb2.py +35 -0
  102. tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
  103. tensorbored/compat/proto/saver_pb2.py +38 -0
  104. tensorbored/compat/proto/step_stats_pb2.py +116 -0
  105. tensorbored/compat/proto/struct_pb2.py +144 -0
  106. tensorbored/compat/proto/summary_pb2.py +111 -0
  107. tensorbored/compat/proto/tensor_description_pb2.py +38 -0
  108. tensorbored/compat/proto/tensor_pb2.py +68 -0
  109. tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
  110. tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
  111. tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
  112. tensorbored/compat/proto/types_pb2.py +105 -0
  113. tensorbored/compat/proto/variable_pb2.py +62 -0
  114. tensorbored/compat/proto/verifier_config_pb2.py +38 -0
  115. tensorbored/compat/proto/versions_pb2.py +35 -0
  116. tensorbored/compat/tensorflow_stub/__init__.py +38 -0
  117. tensorbored/compat/tensorflow_stub/app.py +124 -0
  118. tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
  119. tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
  120. tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
  121. tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
  122. tensorbored/compat/tensorflow_stub/errors.py +507 -0
  123. tensorbored/compat/tensorflow_stub/flags.py +124 -0
  124. tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
  125. tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
  126. tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
  127. tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
  128. tensorbored/context.py +129 -0
  129. tensorbored/data/__init__.py +0 -0
  130. tensorbored/data/grpc_provider.py +365 -0
  131. tensorbored/data/ingester.py +46 -0
  132. tensorbored/data/proto/__init__.py +0 -0
  133. tensorbored/data/proto/data_provider_pb2.py +517 -0
  134. tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
  135. tensorbored/data/provider.py +1365 -0
  136. tensorbored/data/server_ingester.py +301 -0
  137. tensorbored/data_compat.py +159 -0
  138. tensorbored/dataclass_compat.py +224 -0
  139. tensorbored/default.py +124 -0
  140. tensorbored/errors.py +130 -0
  141. tensorbored/lazy.py +99 -0
  142. tensorbored/main.py +48 -0
  143. tensorbored/main_lib.py +62 -0
  144. tensorbored/manager.py +487 -0
  145. tensorbored/notebook.py +441 -0
  146. tensorbored/plugin_util.py +266 -0
  147. tensorbored/plugins/__init__.py +0 -0
  148. tensorbored/plugins/audio/__init__.py +0 -0
  149. tensorbored/plugins/audio/audio_plugin.py +229 -0
  150. tensorbored/plugins/audio/metadata.py +69 -0
  151. tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
  152. tensorbored/plugins/audio/summary.py +230 -0
  153. tensorbored/plugins/audio/summary_v2.py +124 -0
  154. tensorbored/plugins/base_plugin.py +367 -0
  155. tensorbored/plugins/core/__init__.py +0 -0
  156. tensorbored/plugins/core/core_plugin.py +981 -0
  157. tensorbored/plugins/custom_scalar/__init__.py +0 -0
  158. tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
  159. tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
  160. tensorbored/plugins/custom_scalar/metadata.py +35 -0
  161. tensorbored/plugins/custom_scalar/summary.py +79 -0
  162. tensorbored/plugins/debugger_v2/__init__.py +0 -0
  163. tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
  164. tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
  165. tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
  166. tensorbored/plugins/distribution/__init__.py +0 -0
  167. tensorbored/plugins/distribution/compressor.py +158 -0
  168. tensorbored/plugins/distribution/distributions_plugin.py +116 -0
  169. tensorbored/plugins/distribution/metadata.py +19 -0
  170. tensorbored/plugins/graph/__init__.py +0 -0
  171. tensorbored/plugins/graph/graph_util.py +129 -0
  172. tensorbored/plugins/graph/graphs_plugin.py +336 -0
  173. tensorbored/plugins/graph/keras_util.py +328 -0
  174. tensorbored/plugins/graph/metadata.py +42 -0
  175. tensorbored/plugins/histogram/__init__.py +0 -0
  176. tensorbored/plugins/histogram/histograms_plugin.py +144 -0
  177. tensorbored/plugins/histogram/metadata.py +63 -0
  178. tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
  179. tensorbored/plugins/histogram/summary.py +234 -0
  180. tensorbored/plugins/histogram/summary_v2.py +292 -0
  181. tensorbored/plugins/hparams/__init__.py +14 -0
  182. tensorbored/plugins/hparams/_keras.py +93 -0
  183. tensorbored/plugins/hparams/api.py +130 -0
  184. tensorbored/plugins/hparams/api_pb2.py +208 -0
  185. tensorbored/plugins/hparams/backend_context.py +606 -0
  186. tensorbored/plugins/hparams/download_data.py +158 -0
  187. tensorbored/plugins/hparams/error.py +26 -0
  188. tensorbored/plugins/hparams/get_experiment.py +71 -0
  189. tensorbored/plugins/hparams/hparams_plugin.py +206 -0
  190. tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
  191. tensorbored/plugins/hparams/json_format_compat.py +38 -0
  192. tensorbored/plugins/hparams/list_metric_evals.py +57 -0
  193. tensorbored/plugins/hparams/list_session_groups.py +1040 -0
  194. tensorbored/plugins/hparams/metadata.py +125 -0
  195. tensorbored/plugins/hparams/metrics.py +41 -0
  196. tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
  197. tensorbored/plugins/hparams/summary.py +205 -0
  198. tensorbored/plugins/hparams/summary_v2.py +597 -0
  199. tensorbored/plugins/image/__init__.py +0 -0
  200. tensorbored/plugins/image/images_plugin.py +232 -0
  201. tensorbored/plugins/image/metadata.py +65 -0
  202. tensorbored/plugins/image/plugin_data_pb2.py +34 -0
  203. tensorbored/plugins/image/summary.py +159 -0
  204. tensorbored/plugins/image/summary_v2.py +130 -0
  205. tensorbored/plugins/mesh/__init__.py +14 -0
  206. tensorbored/plugins/mesh/mesh_plugin.py +292 -0
  207. tensorbored/plugins/mesh/metadata.py +152 -0
  208. tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
  209. tensorbored/plugins/mesh/summary.py +251 -0
  210. tensorbored/plugins/mesh/summary_v2.py +214 -0
  211. tensorbored/plugins/metrics/__init__.py +0 -0
  212. tensorbored/plugins/metrics/metadata.py +17 -0
  213. tensorbored/plugins/metrics/metrics_plugin.py +623 -0
  214. tensorbored/plugins/pr_curve/__init__.py +0 -0
  215. tensorbored/plugins/pr_curve/metadata.py +75 -0
  216. tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
  217. tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
  218. tensorbored/plugins/pr_curve/summary.py +574 -0
  219. tensorbored/plugins/profile_redirect/__init__.py +0 -0
  220. tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
  221. tensorbored/plugins/projector/__init__.py +67 -0
  222. tensorbored/plugins/projector/metadata.py +26 -0
  223. tensorbored/plugins/projector/projector_config_pb2.py +54 -0
  224. tensorbored/plugins/projector/projector_plugin.py +795 -0
  225. tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
  226. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
  227. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
  228. tensorbored/plugins/scalar/__init__.py +0 -0
  229. tensorbored/plugins/scalar/metadata.py +60 -0
  230. tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
  231. tensorbored/plugins/scalar/scalars_plugin.py +181 -0
  232. tensorbored/plugins/scalar/summary.py +109 -0
  233. tensorbored/plugins/scalar/summary_v2.py +124 -0
  234. tensorbored/plugins/text/__init__.py +0 -0
  235. tensorbored/plugins/text/metadata.py +62 -0
  236. tensorbored/plugins/text/plugin_data_pb2.py +34 -0
  237. tensorbored/plugins/text/summary.py +114 -0
  238. tensorbored/plugins/text/summary_v2.py +124 -0
  239. tensorbored/plugins/text/text_plugin.py +288 -0
  240. tensorbored/plugins/wit_redirect/__init__.py +0 -0
  241. tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
  242. tensorbored/program.py +910 -0
  243. tensorbored/summary/__init__.py +35 -0
  244. tensorbored/summary/_output.py +124 -0
  245. tensorbored/summary/_tf/__init__.py +14 -0
  246. tensorbored/summary/_tf/summary/__init__.py +178 -0
  247. tensorbored/summary/_writer.py +105 -0
  248. tensorbored/summary/v1.py +51 -0
  249. tensorbored/summary/v2.py +25 -0
  250. tensorbored/summary/writer/__init__.py +13 -0
  251. tensorbored/summary/writer/event_file_writer.py +291 -0
  252. tensorbored/summary/writer/record_writer.py +50 -0
  253. tensorbored/util/__init__.py +0 -0
  254. tensorbored/util/encoder.py +116 -0
  255. tensorbored/util/grpc_util.py +311 -0
  256. tensorbored/util/img_mime_type_detector.py +40 -0
  257. tensorbored/util/io_util.py +20 -0
  258. tensorbored/util/lazy_tensor_creator.py +110 -0
  259. tensorbored/util/op_evaluator.py +104 -0
  260. tensorbored/util/platform_util.py +20 -0
  261. tensorbored/util/tb_logging.py +24 -0
  262. tensorbored/util/tensor_util.py +617 -0
  263. tensorbored/util/timing.py +122 -0
  264. tensorbored/version.py +21 -0
  265. tensorbored/webfiles.zip +0 -0
  266. tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
  267. tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
  268. tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
  269. tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
  270. tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
  271. tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
@@ -0,0 +1,301 @@
1
+ # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """Provides data ingestion logic backed by a gRPC server."""
16
+
17
+ import errno
18
+ import logging
19
+ import os
20
+ import subprocess
21
+ import tempfile
22
+ import time
23
+
24
+ import grpc
25
+ import pkg_resources
26
+
27
+ from tensorbored.data import grpc_provider
28
+ from tensorbored.data import ingester
29
+ from tensorbored.data.proto import data_provider_pb2
30
+ from tensorbored.util import tb_logging
31
+
32
+ logger = tb_logging.get_logger()
33
+
34
+ # If this environment variable is non-empty, it will be used as the path to the
35
+ # data server binary rather than using a bundled version.
36
+ _ENV_DATA_SERVER_BINARY = "TENSORBOARD_DATA_SERVER_BINARY"
37
+
38
+
39
class ExistingServerDataIngester(ingester.DataIngester):
    """Connect to an already running gRPC server."""

    def __init__(self, address, *, channel_creds_type):
        """Initializes an ingester with the given configuration.

        Args:
          address: String, as passed to `--grpc_data_provider`.
          channel_creds_type: `grpc_util.ChannelCredsType`, as passed to
            `--grpc_creds_type`.
        """
        stub = _make_stub(address, channel_creds_type)
        self._data_provider = grpc_provider.GrpcDataProvider(address, stub)

    @property
    def data_provider(self):
        return self._data_provider

    def start(self):
        # Nothing to launch: the server is managed externally.
        pass
59
+
60
+
61
class SubprocessServerDataIngester(ingester.DataIngester):
    """Start a new data server as a subprocess."""

    def __init__(
        self,
        server_binary,
        logdir,
        *,
        reload_interval,
        channel_creds_type,
        samples_per_plugin=None,
        extra_flags=None,
    ):
        """Initializes an ingester with the given configuration.

        Args:
          server_binary: `ServerBinary` to launch.
          logdir: String, as passed to `--logdir`.
          reload_interval: Number, as passed to `--reload_interval`.
          channel_creds_type: `grpc_util.ChannelCredsType`, as passed to
            `--grpc_creds_type`.
          samples_per_plugin: Dict[String, Int], as parsed from
            `--samples_per_plugin`.
          extra_flags: List of extra string flags to be passed to the
            data server without further interpretation.
        """
        self._server_binary = server_binary
        self._data_provider = None  # set by `start`
        self._logdir = logdir
        self._reload_interval = reload_interval
        self._channel_creds_type = channel_creds_type
        self._samples_per_plugin = samples_per_plugin or {}
        self._extra_flags = list(extra_flags or [])

    @property
    def data_provider(self):
        if self._data_provider is None:
            raise RuntimeError("Must call `start` first")
        return self._data_provider

    def start(self):
        """Launch the data server subprocess and connect to it.

        No-op if a data provider was already created by a prior call.

        Raises:
          DataServerStartupError: If the subprocess exits during
            startup, never reports its port in time, or cannot be
            reached over gRPC once started.
        """
        if self._data_provider:
            return

        # NOTE(review): `tmpdir` is a local, so the directory is removed
        # when it's collected after `start` returns; the port/error
        # files appear to be needed only during startup — confirm.
        tmpdir = tempfile.TemporaryDirectory(prefix="tensorboard_data_server_")
        port_file_path = os.path.join(tmpdir.name, "port")
        error_file_path = os.path.join(tmpdir.name, "startup_error")

        # Non-positive reload intervals mean "load once, never reload".
        if self._reload_interval <= 0:
            reload = "once"
        else:
            reload = str(int(self._reload_interval))

        # A sample count of 0 means "keep all samples" to the server.
        sample_hint_pairs = [
            "%s=%s" % (k, "all" if v == 0 else v)
            for k, v in self._samples_per_plugin.items()
        ]
        samples_per_plugin = ",".join(sample_hint_pairs)

        args = [
            self._server_binary.path,
            "--logdir=%s" % os.path.expanduser(self._logdir),
            "--reload=%s" % reload,
            "--samples-per-plugin=%s" % samples_per_plugin,
            "--port=0",
            "--port-file=%s" % (port_file_path,),
            "--die-after-stdin",
        ]
        # `--error-file` is only understood by server binaries >= 0.5.0a0.
        if self._server_binary.at_least_version("0.5.0a0"):
            args.append("--error-file=%s" % (error_file_path,))
        if logger.isEnabledFor(logging.INFO):
            args.append("--verbose")
        if logger.isEnabledFor(logging.DEBUG):
            args.append("--verbose")  # Repeat arg to increase verbosity.
        args.extend(self._extra_flags)

        logger.info("Spawning data server: %r", args)
        popen = subprocess.Popen(args, stdin=subprocess.PIPE)
        # Stash stdin to avoid calling its destructor: on Windows, this
        # is a `subprocess.Handle` that closes itself in `__del__`,
        # which would cause the data server to shut down. (This is not
        # documented; you have to read CPython source to figure it out.)
        # We want that to happen at end of process, but not before.
        self._stdin_handle = popen.stdin  # stash to avoid stdin being closed

        port = None
        # The server only needs about 10 microseconds to spawn on my machine,
        # but give a few orders of magnitude of padding, and then poll.
        time.sleep(0.01)
        for i in range(20):
            if popen.poll() is not None:
                # Server died during startup; surface its error file if any.
                msg = (_maybe_read_file(error_file_path) or "").strip()
                if not msg:
                    msg = (
                        "exited with %d; check stderr for details"
                        % popen.poll()
                    )
                raise DataServerStartupError(msg)
            logger.info("Polling for data server port (attempt %d)", i)
            port_file_contents = _maybe_read_file(port_file_path)
            logger.info("Port file contents: %r", port_file_contents)
            if (port_file_contents or "").endswith("\n"):
                # A trailing newline signals that the server finished
                # writing the port file.
                port = int(port_file_contents)
                break
            # Else, not done writing yet.
            time.sleep(0.5)
        if port is None:
            raise DataServerStartupError(
                "Timed out while waiting for data server to start. "
                "It may still be running as pid %d." % popen.pid
            )

        addr = "localhost:%d" % port
        stub = _make_stub(addr, self._channel_creds_type)
        logger.info(
            "Opened channel to data server at pid %d via %s",
            popen.pid,
            addr,
        )

        # Smoke-test the connection before declaring success.
        req = data_provider_pb2.GetExperimentRequest()
        try:
            stub.GetExperiment(req, timeout=5)  # should be near-instant
        except grpc.RpcError as e:
            msg = "Failed to communicate with data server at %s: %s" % (addr, e)
            # Fixed: use the module `logger` (was `logging.warning`,
            # which bypassed the configured TensorBoard logger).
            logger.warning("%s", msg)
            raise DataServerStartupError(msg) from e
        logger.info("Got valid response from data server")
        self._data_provider = grpc_provider.GrpcDataProvider(addr, stub)
190
+
191
+
192
+ def _maybe_read_file(path):
193
+ """Read a file, or return `None` on ENOENT specifically."""
194
+ try:
195
+ with open(path) as infile:
196
+ return infile.read()
197
+ except OSError as e:
198
+ if e.errno == errno.ENOENT:
199
+ return None
200
+ raise
201
+
202
+
203
def _make_stub(addr, channel_creds_type):
    """Open a secure channel to `addr` and wrap it in a data-provider stub."""
    creds, options = channel_creds_type.channel_config()
    # Raise the receive cap to 256 MiB; responses can be large.
    max_message_bytes = 1024 * 1024 * 256
    options.append(("grpc.max_receive_message_length", max_message_bytes))
    channel = grpc.secure_channel(addr, creds, options=options)
    return grpc_provider.make_stub(channel)
208
+
209
+
210
class NoDataServerError(RuntimeError):
    """Raised when no usable data server binary can be located."""
212
+
213
+
214
class DataServerStartupError(RuntimeError):
    """Raised when the data server fails to start or become reachable."""
216
+
217
+
218
class ServerBinary:
    """Information about a data server binary."""

    def __init__(self, path, version):
        """Initializes a `ServerBinary`.

        Args:
          path: String path to executable on disk.
          version: PEP 396-compliant version string, or `None` if
            unknown or not applicable. Binaries at unknown versions are
            assumed to be bleeding-edge: if you bring your own binary,
            it's on you to make sure that it's up to date.
        """
        self._path = path
        if version is None:
            self._version = None
        else:
            self._version = pkg_resources.parse_version(version)

    @property
    def path(self):
        return self._path

    def at_least_version(self, required_version):
        """Test whether the binary's version is at least the given one.

        Useful for gating features that are available in the latest data
        server builds from head, but not yet released to PyPI. For
        example, if v0.4.0 is the latest published version, you can
        check `at_least_version("0.5.0a0")` to include both prereleases
        at head and the eventual final release of v0.5.0.

        If this binary's version was set to `None` at construction time,
        this method always returns `True`.

        Args:
          required_version: PEP 396-compliant version string.

        Returns:
          Boolean.
        """
        if self._version is None:
            # Unknown version: assume bleeding-edge.
            return True
        required = pkg_resources.parse_version(required_version)
        return self._version >= required
263
+
264
+
265
def get_server_binary():
    """Get `ServerBinary` info or raise `NoDataServerError`.

    Checks, in order: the path named by the environment variable
    `_ENV_DATA_SERVER_BINARY`, a `server/server` binary bundled next to
    this module, and the `tensorboard_data_server` Python package.

    Returns:
      A `ServerBinary` for the first source that yields one.

    Raises:
      NoDataServerError: If the env-specified file does not exist, the
        package reports no binary for this platform, or no source
        yields a binary at all.
    """
    env_result = os.environ.get(_ENV_DATA_SERVER_BINARY)
    if env_result:
        # Fixed: use the module `logger` (was `logging.info`, which
        # bypassed the configured TensorBoard logger). Same below.
        logger.info("Server binary (from env): %s", env_result)
        if not os.path.isfile(env_result):
            raise NoDataServerError(
                "Found environment variable %s=%s, but no such file exists."
                % (_ENV_DATA_SERVER_BINARY, env_result)
            )
        # User-supplied binary: version unknown, assumed bleeding-edge.
        return ServerBinary(env_result, version=None)

    bundle_result = os.path.join(os.path.dirname(__file__), "server", "server")
    if os.path.exists(bundle_result):
        logger.info("Server binary (from bundle): %s", bundle_result)
        return ServerBinary(bundle_result, version=None)

    try:
        import tensorboard_data_server
    except ImportError:
        pass
    else:
        pkg_result = tensorboard_data_server.server_binary()
        version = tensorboard_data_server.__version__
        logger.info(
            "Server binary (from Python package v%s): %s", version, pkg_result
        )
        if pkg_result is None:
            raise NoDataServerError(
                "TensorBoard data server not supported on this platform."
            )
        return ServerBinary(pkg_result, version)

    raise NoDataServerError(
        "TensorBoard data server not found. This mode is experimental. "
        "If building from source, pass --define=link_data_server=true."
    )
@@ -0,0 +1,159 @@
1
+ # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """Utilities to migrate legacy protos to their modern equivalents."""
16
+
17
+ import numpy as np
18
+
19
+ from tensorbored.compat.proto import event_pb2
20
+ from tensorbored.compat.proto import summary_pb2
21
+ from tensorbored.plugins.audio import metadata as audio_metadata
22
+ from tensorbored.plugins.histogram import metadata as histogram_metadata
23
+ from tensorbored.plugins.image import metadata as image_metadata
24
+ from tensorbored.plugins.scalar import metadata as scalar_metadata
25
+ from tensorbored.util import tensor_util
26
+
27
+
28
def migrate_event(event):
    """Return `event` with its summary values migrated to new-style form.

    Returns the original event object unchanged when no value needed
    migration; otherwise returns a fresh `Event` copy.
    """
    if not event.HasField("summary"):
        return event
    originals = event.summary.value
    migrated = [migrate_value(v) for v in originals]
    # Optimization: Don't create a new event if there were no changes.
    untouched = len(originals) == len(migrated) and all(
        new is old for (old, new) in zip(originals, migrated)
    )
    if untouched:
        return event
    result = event_pb2.Event()
    result.CopyFrom(event)
    del result.summary.value[:]
    result.summary.value.extend(migrated)
    return result
43
+
44
+
45
def migrate_value(value):
    """Convert `value` to a new-style value, if necessary and possible.

    An "old-style" value is a value that uses any `value` field other than
    the `tensor` field. A "new-style" value is a value that uses the
    `tensor` field. TensorBoard continues to support old-style values on
    disk; this method converts them to new-style values so that further
    code need only deal with one data format.

    Arguments:
      value: A `Summary.Value` object. This argument is not modified.

    Returns:
      If the `value` is an old-style value for which there is a new-style
      equivalent, the result is the new-style value. Otherwise---if the
      value is already new-style or does not yet have a new-style
      equivalent---the value will be returned unchanged.

    :type value: Summary.Value
    :rtype: Summary.Value
    """
    field = value.WhichOneof("value")
    if field == "histo":
        return _migrate_histogram_value(value)
    if field == "image":
        return _migrate_image_value(value)
    if field == "audio":
        return _migrate_audio_value(value)
    if field == "simple_value":
        return _migrate_scalar_value(value)
    # Already new-style, or no new-style equivalent exists yet.
    return value
73
+
74
+
75
def make_summary(tag, metadata, data):
    """Build a new-style `Summary.Value` whose tensor field holds `data`."""
    tensor = tensor_util.make_tensor_proto(data)
    return summary_pb2.Summary.Value(tag=tag, metadata=metadata, tensor=tensor)
80
+
81
+
82
def _migrate_histogram_value(value):
    """Convert `old-style` histogram value to `new-style`.

    The "old-style" format can have outermost bucket limits of -DBL_MAX and
    DBL_MAX, which are problematic for visualization. We replace those here
    with the actual min and max values seen in the input data, but then in
    order to avoid introducing "backwards" buckets (where left edge > right
    edge), we first must drop all empty buckets on the left and right ends.

    Args:
      value: A `Summary.Value` whose `histo` field is set.

    Returns:
      A new-style `Summary.Value` whose tensor is a float32 array of
      shape `[k, 3]` with one row `[left_edge, right_edge, count]` per
      retained bucket (`k` may be 0 if all buckets were empty).
    """
    histogram_value = value.histo
    bucket_counts = histogram_value.bucket
    # Find the indices of the leftmost and rightmost non-empty buckets.
    # `start` falls back to n (and `end` to -1) when every bucket is
    # empty, which makes the `start > end` check below fire.
    n = len(bucket_counts)
    start = next((i for i in range(n) if bucket_counts[i] > 0), n)
    end = next((i for i in reversed(range(n)) if bucket_counts[i] > 0), -1)
    if start > end:
        # If all input buckets were empty, treat it as a zero-bucket
        # new-style histogram.
        buckets = np.zeros([0, 3], dtype=np.float32)
    else:
        # Discard empty buckets on both ends, and keep only the "inner"
        # edges from the remaining buckets. Note that bucket indices range
        # from `start` to `end` inclusive, but bucket_limit indices are
        # exclusive of `end` - this is because bucket_limit[i] is the
        # right-hand edge for bucket[i].
        bucket_counts = bucket_counts[start : end + 1]
        inner_edges = histogram_value.bucket_limit[start:end]
        # Use min as the left-hand limit for the first non-empty bucket.
        # (Slicing a protobuf repeated field yields a plain list, so the
        # `+` concatenations below are list operations.)
        bucket_lefts = [histogram_value.min] + inner_edges
        # Use max as the right-hand limit for the last non-empty bucket.
        bucket_rights = inner_edges + [histogram_value.max]
        # Stack as rows [lefts; rights; counts] then transpose to [k, 3].
        buckets = np.array(
            [bucket_lefts, bucket_rights, bucket_counts], dtype=np.float32
        ).transpose()

    summary_metadata = histogram_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )

    return make_summary(value.tag, summary_metadata, buckets)
123
+
124
+
125
def _migrate_image_value(value):
    """Convert an old-style image value to a new-style tensor value."""
    image_value = value.image
    # Tensor layout: width and height as ASCII strings, then the
    # encoded image bytes.
    width = str(image_value.width).encode("ascii")
    height = str(image_value.height).encode("ascii")
    data = [width, height, image_value.encoded_image_string]

    summary_metadata = image_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, data)
139
+
140
+
141
def _migrate_audio_value(value):
    """Convert an old-style audio value to a new-style tensor value."""
    encoded = value.audio.encoded_audio_string
    # A single clip paired with an empty label.
    data = [[encoded, b""]]
    summary_metadata = audio_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
        encoding=audio_metadata.Encoding.Value("WAV"),
        converted_to_tensor=True,
    )
    return make_summary(value.tag, summary_metadata, data)
151
+
152
+
153
def _migrate_scalar_value(value):
    """Convert an old-style simple_value to a new-style tensor value."""
    summary_metadata = scalar_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )
    return make_summary(value.tag, summary_metadata, value.simple_value)
@@ -0,0 +1,224 @@
1
+ # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """Utilities to migrate legacy summaries/events to generic data form.
16
+
17
+ For legacy summaries, this populates the `SummaryMetadata.data_class`
18
+ field and makes any necessary transformations to the tensor value. For
19
+ `graph_def` events, this creates a new summary event.
20
+
21
+ This should be applied after the `data_compat` transformation.
22
+ """
23
+
24
+ from tensorbored.compat.proto import event_pb2
25
+ from tensorbored.compat.proto import summary_pb2
26
+ from tensorbored.plugins.audio import metadata as audio_metadata
27
+ from tensorbored.plugins.custom_scalar import (
28
+ metadata as custom_scalars_metadata,
29
+ )
30
+ from tensorbored.plugins.graph import metadata as graphs_metadata
31
+ from tensorbored.plugins.histogram import metadata as histograms_metadata
32
+ from tensorbored.plugins.hparams import metadata as hparams_metadata
33
+ from tensorbored.plugins.image import metadata as images_metadata
34
+ from tensorbored.plugins.mesh import metadata as mesh_metadata
35
+ from tensorbored.plugins.pr_curve import metadata as pr_curves_metadata
36
+ from tensorbored.plugins.scalar import metadata as scalars_metadata
37
+ from tensorbored.plugins.text import metadata as text_metadata
38
+ from tensorbored.util import tensor_util
39
+
40
+
41
def migrate_event(event, initial_metadata):
    """Migrate an event to a sequence of events.

    Args:
      event: An `event_pb2.Event`. The caller transfers ownership of the
        event to this method; the event may be mutated, and may or may
        not appear in the returned sequence.
      initial_metadata: Map from tag name (string) to `SummaryMetadata`
        proto for the initial occurrence of the given tag within the
        enclosing run. While loading a given run, the caller should
        always pass the same dictionary here, initially `{}`; this
        function will mutate it and reuse it for future calls.

    Returns:
      A sequence of `event_pb2.Event`s to use instead of `event`.
    """
    which = event.WhichOneof("what")
    if which == "summary":
        return _migrate_summary_event(event, initial_metadata)
    elif which == "graph_def":
        return _migrate_graph_event(event)
    elif which == "tagged_run_metadata":
        return _migrate_tagged_run_metadata_event(event)
    # Any other kind of event passes through unchanged.
    return (event,)
65
+
66
+
67
def _migrate_graph_event(old_event):
    """Wrap a `graph_def` event in a new blob-sequence summary event."""
    new_event = event_pb2.Event()
    new_event.wall_time = old_event.wall_time
    new_event.step = old_event.step
    summary_value = new_event.summary.value.add(
        tag=graphs_metadata.RUN_GRAPH_NAME
    )
    summary_value.tensor.CopyFrom(
        tensor_util.make_tensor_proto([old_event.graph_def])
    )
    md = summary_value.metadata
    md.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
    # `md.plugin_data.content` is intentionally left empty.
    md.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    # As long as the graphs plugin still reads the old format, keep both
    # the old event and the new event to maintain compatibility.
    return (old_event, new_event)
80
+
81
+
82
def _migrate_tagged_run_metadata_event(old_event):
    """Convert a `tagged_run_metadata` event to a blob-sequence summary."""
    trm = old_event.tagged_run_metadata
    new_event = event_pb2.Event()
    new_event.wall_time = old_event.wall_time
    new_event.step = old_event.step
    summary_value = new_event.summary.value.add(tag=trm.tag)
    summary_value.tensor.CopyFrom(
        tensor_util.make_tensor_proto([trm.run_metadata])
    )
    md = summary_value.metadata
    md.plugin_data.plugin_name = (
        graphs_metadata.PLUGIN_NAME_TAGGED_RUN_METADATA
    )
    # `md.plugin_data.content` is intentionally left empty.
    md.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    # Only the new event is kept; the legacy one is dropped.
    return (new_event,)
95
+
96
+
97
def _migrate_summary_event(event, initial_metadata):
    """Migrate each value of a summary event, rewriting only if needed."""
    old_values = event.summary.value
    migrated = []
    for old in old_values:
        migrated.extend(_migrate_value(old, initial_metadata))
    # Optimization: leave the event alone when migration made no shallow
    # changes (values may still have been mutated in place).
    unchanged = len(migrated) == len(old_values) and all(
        new is old for (new, old) in zip(migrated, old_values)
    )
    if not unchanged:
        del event.summary.value[:]
        event.summary.value.extend(migrated)
    return (event,)
111
+
112
+
113
def _migrate_value(value, initial_metadata):
    """Convert an old value to a stream of new values. May mutate.

    Dispatch is based on the *initial* metadata seen for this tag, so
    that later events in a time series are transformed the same way as
    the first one, even after its data class has been rewritten.

    Args:
      value: A `Summary.Value` proto; may be mutated in place.
      initial_metadata: Map from tag name (string) to the
        `SummaryMetadata` of the first occurrence of that tag; mutated
        here to record tags seen for the first time.

    Returns:
      A tuple of `Summary.Value` protos to use instead of `value`.
    """
    metadata = initial_metadata.get(value.tag)
    if metadata is None:
        # First occurrence of this tag: retain a copy of the initial
        # metadata, so that even after we update its data class we know
        # whether to also transform later events in this time series.
        # (The previous revision also tracked an `initial` flag here,
        # but it was never read; it has been removed as dead code.)
        metadata = summary_pb2.SummaryMetadata()
        metadata.CopyFrom(value.metadata)
        initial_metadata[value.tag] = metadata
    if metadata.data_class != summary_pb2.DATA_CLASS_UNKNOWN:
        # Already in generic data form; nothing to migrate.
        return (value,)
    plugin_name = metadata.plugin_data.plugin_name
    if plugin_name == histograms_metadata.PLUGIN_NAME:
        return _migrate_histogram_value(value)
    if plugin_name == images_metadata.PLUGIN_NAME:
        return _migrate_image_value(value)
    if plugin_name == audio_metadata.PLUGIN_NAME:
        return _migrate_audio_value(value)
    if plugin_name == scalars_metadata.PLUGIN_NAME:
        return _migrate_scalar_value(value)
    if plugin_name == text_metadata.PLUGIN_NAME:
        return _migrate_text_value(value)
    if plugin_name == hparams_metadata.PLUGIN_NAME:
        return _migrate_hparams_value(value)
    if plugin_name == pr_curves_metadata.PLUGIN_NAME:
        return _migrate_pr_curve_value(value)
    if plugin_name == mesh_metadata.PLUGIN_NAME:
        return _migrate_mesh_value(value)
    if plugin_name == custom_scalars_metadata.PLUGIN_NAME:
        return _migrate_custom_scalars_value(value)
    if plugin_name in [
        graphs_metadata.PLUGIN_NAME_RUN_METADATA,
        graphs_metadata.PLUGIN_NAME_RUN_METADATA_WITH_GRAPH,
        graphs_metadata.PLUGIN_NAME_KERAS_MODEL,
    ]:
        return _migrate_graph_sub_plugin_value(value)
    # Unknown plugin: pass the value through unchanged.
    return (value,)
153
+
154
+
155
def _migrate_scalar_value(value):
    """Tag a legacy scalar summary as scalar-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
    return (value,)
159
+
160
+
161
def _migrate_histogram_value(value):
    """Tag a legacy histogram summary as tensor-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
165
+
166
+
167
def _migrate_image_value(value):
    """Tag a legacy image summary as blob-sequence data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    return (value,)
171
+
172
+
173
def _migrate_text_value(value):
    """Tag a legacy text summary as tensor-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
177
+
178
+
179
def _migrate_audio_value(value):
    """Tag legacy audio as blob-sequence data, keeping only clips.

    May mutate `value`. All trailing tensor axes are collapsed so that
    only the leading axis (the actual audio clips) remains; the flat
    string data is subsampled accordingly, keeping the first entry of
    each original row.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    tensor = value.tensor
    dims = tensor.tensor_shape.dim
    stride = 1
    while len(dims) > 1:
        stride *= dims.pop().size
    if stride != 1:
        tensor.string_val[:] = tensor.string_val[::stride]
    return (value,)
190
+
191
+
192
def _migrate_hparams_value(value):
    """Tag hparams data as tensor-class, backfilling a null tensor.

    May mutate `value`.
    """
    if not value.HasField("tensor"):
        # Older hparams summaries may carry no tensor at all; substitute
        # the canonical placeholder so downstream code sees a tensor.
        value.tensor.CopyFrom(hparams_metadata.NULL_TENSOR)
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
198
+
199
+
200
def _migrate_pr_curve_value(value):
    """Tag a legacy PR-curve summary as tensor-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
204
+
205
+
206
def _migrate_mesh_value(value):
    """Tag a legacy mesh summary as tensor-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
210
+
211
+
212
def _migrate_custom_scalars_value(value):
    """Tag a custom-scalars layout as tensor-class data. May mutate."""
    metadata = value.metadata if value.HasField("metadata") else None
    if metadata is not None:
        metadata.data_class = summary_pb2.DATA_CLASS_TENSOR
    return (value,)
216
+
217
+
218
def _migrate_graph_sub_plugin_value(value):
    """Tag graph sub-plugin data as a blob sequence. May mutate.

    If the tensor has no shape at all, it is given a rank-1 shape of
    size 1 so that it forms a valid length-1 blob sequence.
    """
    if value.HasField("metadata"):
        value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    dims = value.tensor.tensor_shape.dim
    if len(dims) == 0:
        dims.add(size=1)
    return (value,)