tensorbored 2.21.0rc1769983804__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (271) hide show
  1. tensorbored/__init__.py +112 -0
  2. tensorbored/_vendor/__init__.py +0 -0
  3. tensorbored/_vendor/bleach/__init__.py +125 -0
  4. tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
  5. tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
  6. tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
  7. tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
  8. tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
  9. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
  10. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
  11. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
  12. tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
  13. tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
  14. tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
  15. tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
  16. tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
  17. tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
  18. tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
  19. tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
  20. tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
  21. tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
  22. tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
  23. tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
  24. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
  25. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
  26. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
  27. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
  28. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
  29. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
  30. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
  31. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
  32. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
  33. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
  34. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
  35. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
  36. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
  37. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
  38. tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
  39. tensorbored/_vendor/bleach/callbacks.py +32 -0
  40. tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
  41. tensorbored/_vendor/bleach/linkifier.py +633 -0
  42. tensorbored/_vendor/bleach/parse_shim.py +1 -0
  43. tensorbored/_vendor/bleach/sanitizer.py +638 -0
  44. tensorbored/_vendor/bleach/six_shim.py +19 -0
  45. tensorbored/_vendor/webencodings/__init__.py +342 -0
  46. tensorbored/_vendor/webencodings/labels.py +231 -0
  47. tensorbored/_vendor/webencodings/mklabels.py +59 -0
  48. tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
  49. tensorbored/assets.py +36 -0
  50. tensorbored/auth.py +102 -0
  51. tensorbored/backend/__init__.py +0 -0
  52. tensorbored/backend/application.py +604 -0
  53. tensorbored/backend/auth_context_middleware.py +38 -0
  54. tensorbored/backend/client_feature_flags.py +113 -0
  55. tensorbored/backend/empty_path_redirect.py +46 -0
  56. tensorbored/backend/event_processing/__init__.py +0 -0
  57. tensorbored/backend/event_processing/data_ingester.py +276 -0
  58. tensorbored/backend/event_processing/data_provider.py +535 -0
  59. tensorbored/backend/event_processing/directory_loader.py +142 -0
  60. tensorbored/backend/event_processing/directory_watcher.py +272 -0
  61. tensorbored/backend/event_processing/event_accumulator.py +950 -0
  62. tensorbored/backend/event_processing/event_file_inspector.py +463 -0
  63. tensorbored/backend/event_processing/event_file_loader.py +292 -0
  64. tensorbored/backend/event_processing/event_multiplexer.py +521 -0
  65. tensorbored/backend/event_processing/event_util.py +68 -0
  66. tensorbored/backend/event_processing/io_wrapper.py +223 -0
  67. tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
  68. tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
  69. tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
  70. tensorbored/backend/event_processing/reservoir.py +266 -0
  71. tensorbored/backend/event_processing/tag_types.py +29 -0
  72. tensorbored/backend/experiment_id.py +71 -0
  73. tensorbored/backend/experimental_plugin.py +51 -0
  74. tensorbored/backend/http_util.py +263 -0
  75. tensorbored/backend/json_util.py +70 -0
  76. tensorbored/backend/path_prefix.py +67 -0
  77. tensorbored/backend/process_graph.py +74 -0
  78. tensorbored/backend/security_validator.py +202 -0
  79. tensorbored/compat/__init__.py +69 -0
  80. tensorbored/compat/proto/__init__.py +0 -0
  81. tensorbored/compat/proto/allocation_description_pb2.py +35 -0
  82. tensorbored/compat/proto/api_def_pb2.py +82 -0
  83. tensorbored/compat/proto/attr_value_pb2.py +80 -0
  84. tensorbored/compat/proto/cluster_pb2.py +58 -0
  85. tensorbored/compat/proto/config_pb2.py +271 -0
  86. tensorbored/compat/proto/coordination_config_pb2.py +45 -0
  87. tensorbored/compat/proto/cost_graph_pb2.py +87 -0
  88. tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
  89. tensorbored/compat/proto/debug_pb2.py +65 -0
  90. tensorbored/compat/proto/event_pb2.py +149 -0
  91. tensorbored/compat/proto/full_type_pb2.py +74 -0
  92. tensorbored/compat/proto/function_pb2.py +157 -0
  93. tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
  94. tensorbored/compat/proto/graph_pb2.py +41 -0
  95. tensorbored/compat/proto/histogram_pb2.py +39 -0
  96. tensorbored/compat/proto/meta_graph_pb2.py +254 -0
  97. tensorbored/compat/proto/node_def_pb2.py +61 -0
  98. tensorbored/compat/proto/op_def_pb2.py +81 -0
  99. tensorbored/compat/proto/resource_handle_pb2.py +48 -0
  100. tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
  101. tensorbored/compat/proto/rpc_options_pb2.py +35 -0
  102. tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
  103. tensorbored/compat/proto/saver_pb2.py +38 -0
  104. tensorbored/compat/proto/step_stats_pb2.py +116 -0
  105. tensorbored/compat/proto/struct_pb2.py +144 -0
  106. tensorbored/compat/proto/summary_pb2.py +111 -0
  107. tensorbored/compat/proto/tensor_description_pb2.py +38 -0
  108. tensorbored/compat/proto/tensor_pb2.py +68 -0
  109. tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
  110. tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
  111. tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
  112. tensorbored/compat/proto/types_pb2.py +105 -0
  113. tensorbored/compat/proto/variable_pb2.py +62 -0
  114. tensorbored/compat/proto/verifier_config_pb2.py +38 -0
  115. tensorbored/compat/proto/versions_pb2.py +35 -0
  116. tensorbored/compat/tensorflow_stub/__init__.py +38 -0
  117. tensorbored/compat/tensorflow_stub/app.py +124 -0
  118. tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
  119. tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
  120. tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
  121. tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
  122. tensorbored/compat/tensorflow_stub/errors.py +507 -0
  123. tensorbored/compat/tensorflow_stub/flags.py +124 -0
  124. tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
  125. tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
  126. tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
  127. tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
  128. tensorbored/context.py +129 -0
  129. tensorbored/data/__init__.py +0 -0
  130. tensorbored/data/grpc_provider.py +365 -0
  131. tensorbored/data/ingester.py +46 -0
  132. tensorbored/data/proto/__init__.py +0 -0
  133. tensorbored/data/proto/data_provider_pb2.py +517 -0
  134. tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
  135. tensorbored/data/provider.py +1365 -0
  136. tensorbored/data/server_ingester.py +301 -0
  137. tensorbored/data_compat.py +159 -0
  138. tensorbored/dataclass_compat.py +224 -0
  139. tensorbored/default.py +124 -0
  140. tensorbored/errors.py +130 -0
  141. tensorbored/lazy.py +99 -0
  142. tensorbored/main.py +48 -0
  143. tensorbored/main_lib.py +62 -0
  144. tensorbored/manager.py +487 -0
  145. tensorbored/notebook.py +441 -0
  146. tensorbored/plugin_util.py +266 -0
  147. tensorbored/plugins/__init__.py +0 -0
  148. tensorbored/plugins/audio/__init__.py +0 -0
  149. tensorbored/plugins/audio/audio_plugin.py +229 -0
  150. tensorbored/plugins/audio/metadata.py +69 -0
  151. tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
  152. tensorbored/plugins/audio/summary.py +230 -0
  153. tensorbored/plugins/audio/summary_v2.py +124 -0
  154. tensorbored/plugins/base_plugin.py +367 -0
  155. tensorbored/plugins/core/__init__.py +0 -0
  156. tensorbored/plugins/core/core_plugin.py +981 -0
  157. tensorbored/plugins/custom_scalar/__init__.py +0 -0
  158. tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
  159. tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
  160. tensorbored/plugins/custom_scalar/metadata.py +35 -0
  161. tensorbored/plugins/custom_scalar/summary.py +79 -0
  162. tensorbored/plugins/debugger_v2/__init__.py +0 -0
  163. tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
  164. tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
  165. tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
  166. tensorbored/plugins/distribution/__init__.py +0 -0
  167. tensorbored/plugins/distribution/compressor.py +158 -0
  168. tensorbored/plugins/distribution/distributions_plugin.py +116 -0
  169. tensorbored/plugins/distribution/metadata.py +19 -0
  170. tensorbored/plugins/graph/__init__.py +0 -0
  171. tensorbored/plugins/graph/graph_util.py +129 -0
  172. tensorbored/plugins/graph/graphs_plugin.py +336 -0
  173. tensorbored/plugins/graph/keras_util.py +328 -0
  174. tensorbored/plugins/graph/metadata.py +42 -0
  175. tensorbored/plugins/histogram/__init__.py +0 -0
  176. tensorbored/plugins/histogram/histograms_plugin.py +144 -0
  177. tensorbored/plugins/histogram/metadata.py +63 -0
  178. tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
  179. tensorbored/plugins/histogram/summary.py +234 -0
  180. tensorbored/plugins/histogram/summary_v2.py +292 -0
  181. tensorbored/plugins/hparams/__init__.py +14 -0
  182. tensorbored/plugins/hparams/_keras.py +93 -0
  183. tensorbored/plugins/hparams/api.py +130 -0
  184. tensorbored/plugins/hparams/api_pb2.py +208 -0
  185. tensorbored/plugins/hparams/backend_context.py +606 -0
  186. tensorbored/plugins/hparams/download_data.py +158 -0
  187. tensorbored/plugins/hparams/error.py +26 -0
  188. tensorbored/plugins/hparams/get_experiment.py +71 -0
  189. tensorbored/plugins/hparams/hparams_plugin.py +206 -0
  190. tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
  191. tensorbored/plugins/hparams/json_format_compat.py +38 -0
  192. tensorbored/plugins/hparams/list_metric_evals.py +57 -0
  193. tensorbored/plugins/hparams/list_session_groups.py +1040 -0
  194. tensorbored/plugins/hparams/metadata.py +125 -0
  195. tensorbored/plugins/hparams/metrics.py +41 -0
  196. tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
  197. tensorbored/plugins/hparams/summary.py +205 -0
  198. tensorbored/plugins/hparams/summary_v2.py +597 -0
  199. tensorbored/plugins/image/__init__.py +0 -0
  200. tensorbored/plugins/image/images_plugin.py +232 -0
  201. tensorbored/plugins/image/metadata.py +65 -0
  202. tensorbored/plugins/image/plugin_data_pb2.py +34 -0
  203. tensorbored/plugins/image/summary.py +159 -0
  204. tensorbored/plugins/image/summary_v2.py +130 -0
  205. tensorbored/plugins/mesh/__init__.py +14 -0
  206. tensorbored/plugins/mesh/mesh_plugin.py +292 -0
  207. tensorbored/plugins/mesh/metadata.py +152 -0
  208. tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
  209. tensorbored/plugins/mesh/summary.py +251 -0
  210. tensorbored/plugins/mesh/summary_v2.py +214 -0
  211. tensorbored/plugins/metrics/__init__.py +0 -0
  212. tensorbored/plugins/metrics/metadata.py +17 -0
  213. tensorbored/plugins/metrics/metrics_plugin.py +623 -0
  214. tensorbored/plugins/pr_curve/__init__.py +0 -0
  215. tensorbored/plugins/pr_curve/metadata.py +75 -0
  216. tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
  217. tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
  218. tensorbored/plugins/pr_curve/summary.py +574 -0
  219. tensorbored/plugins/profile_redirect/__init__.py +0 -0
  220. tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
  221. tensorbored/plugins/projector/__init__.py +67 -0
  222. tensorbored/plugins/projector/metadata.py +26 -0
  223. tensorbored/plugins/projector/projector_config_pb2.py +54 -0
  224. tensorbored/plugins/projector/projector_plugin.py +795 -0
  225. tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
  226. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
  227. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
  228. tensorbored/plugins/scalar/__init__.py +0 -0
  229. tensorbored/plugins/scalar/metadata.py +60 -0
  230. tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
  231. tensorbored/plugins/scalar/scalars_plugin.py +181 -0
  232. tensorbored/plugins/scalar/summary.py +109 -0
  233. tensorbored/plugins/scalar/summary_v2.py +124 -0
  234. tensorbored/plugins/text/__init__.py +0 -0
  235. tensorbored/plugins/text/metadata.py +62 -0
  236. tensorbored/plugins/text/plugin_data_pb2.py +34 -0
  237. tensorbored/plugins/text/summary.py +114 -0
  238. tensorbored/plugins/text/summary_v2.py +124 -0
  239. tensorbored/plugins/text/text_plugin.py +288 -0
  240. tensorbored/plugins/wit_redirect/__init__.py +0 -0
  241. tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
  242. tensorbored/program.py +910 -0
  243. tensorbored/summary/__init__.py +35 -0
  244. tensorbored/summary/_output.py +124 -0
  245. tensorbored/summary/_tf/__init__.py +14 -0
  246. tensorbored/summary/_tf/summary/__init__.py +178 -0
  247. tensorbored/summary/_writer.py +105 -0
  248. tensorbored/summary/v1.py +51 -0
  249. tensorbored/summary/v2.py +25 -0
  250. tensorbored/summary/writer/__init__.py +13 -0
  251. tensorbored/summary/writer/event_file_writer.py +291 -0
  252. tensorbored/summary/writer/record_writer.py +50 -0
  253. tensorbored/util/__init__.py +0 -0
  254. tensorbored/util/encoder.py +116 -0
  255. tensorbored/util/grpc_util.py +311 -0
  256. tensorbored/util/img_mime_type_detector.py +40 -0
  257. tensorbored/util/io_util.py +20 -0
  258. tensorbored/util/lazy_tensor_creator.py +110 -0
  259. tensorbored/util/op_evaluator.py +104 -0
  260. tensorbored/util/platform_util.py +20 -0
  261. tensorbored/util/tb_logging.py +24 -0
  262. tensorbored/util/tensor_util.py +617 -0
  263. tensorbored/util/timing.py +122 -0
  264. tensorbored/version.py +21 -0
  265. tensorbored/webfiles.zip +0 -0
  266. tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
  267. tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
  268. tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
  269. tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
  270. tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
  271. tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
@@ -0,0 +1,535 @@
1
+ # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """Bridge from event multiplexer storage to generic data APIs."""
16
+
17
+ import base64
18
+ import collections
19
+ import json
20
+ import random
21
+
22
+ from tensorbored import errors
23
+ from tensorbored.compat.proto import summary_pb2
24
+ from tensorbored.data import provider
25
+ from tensorbored.util import tb_logging
26
+ from tensorbored.util import tensor_util
27
+
28
# Module-level logger. NOTE(review): not referenced anywhere in this module's
# visible code — presumably kept for parity with sibling modules.
logger = tb_logging.get_logger()
29
+
30
+
31
class MultiplexerDataProvider(provider.DataProvider):
    """`DataProvider` backed by an in-memory event multiplexer.

    Serves scalar, tensor, and blob-sequence time series out of a
    `plugin_event_multiplexer.EventMultiplexer`, keyed by run name and tag.
    The `experiment_id` arguments are validated for type but otherwise
    ignored: the multiplexer holds data for a single logdir.
    """

    def __init__(self, multiplexer, logdir):
        """Trivial initializer.

        Args:
          multiplexer: A `plugin_event_multiplexer.EventMultiplexer` (note:
            not a boring old `event_multiplexer.EventMultiplexer`).
          logdir: The log directory from which data is being read. Only used
            cosmetically. Should be a `str`.
        """
        self._multiplexer = multiplexer
        self._logdir = logdir

    def __str__(self):
        """Return a debug-friendly description including the logdir."""
        return "MultiplexerDataProvider(logdir=%r)" % self._logdir

    def _validate_context(self, ctx):
        """Raise `TypeError` unless `ctx` looks like a `RequestContext`.

        The check is by class name rather than `isinstance` — presumably to
        avoid importing the context module here; confirm before changing.
        """
        if type(ctx).__name__ != "RequestContext":
            raise TypeError("ctx must be a RequestContext; got: %r" % (ctx,))

    def _validate_experiment_id(self, experiment_id):
        """Raise `TypeError` if `experiment_id` is not a `str`."""
        # This data provider doesn't consume the experiment ID at all, but
        # as a courtesy to callers we require that it be a valid string, to
        # help catch usage errors.
        if not isinstance(experiment_id, str):
            raise TypeError(
                "experiment_id must be %r, but got %r: %r"
                % (str, type(experiment_id), experiment_id)
            )

    def _validate_downsample(self, downsample):
        """Raise `TypeError` unless `downsample` is an `int` (not `None`)."""
        if downsample is None:
            raise TypeError("`downsample` required but not given")
        if isinstance(downsample, int):
            return  # OK
        raise TypeError(
            "`downsample` must be an int, but got %r: %r"
            % (type(downsample), downsample)
        )

    def _test_run_tag(self, run_tag_filter, run, tag):
        """Return whether `(run, tag)` passes the given `RunTagFilter`.

        A `None` runs or tags collection means "no restriction".
        """
        runs = run_tag_filter.runs
        if runs is not None and run not in runs:
            return False
        tags = run_tag_filter.tags
        if tags is not None and tag not in tags:
            return False
        return True

    def _get_first_event_timestamp(self, run_name):
        """Return the run's first-event timestamp, or `None` if unavailable.

        NOTE(review): `FirstEventTimestamp` appears to raise `ValueError`
        when no event has been seen for the run yet; `e` is unused.
        """
        try:
            return self._multiplexer.FirstEventTimestamp(run_name)
        except ValueError as e:
            return None

    def experiment_metadata(self, ctx=None, *, experiment_id):
        """Return `ExperimentMetadata` whose data location is the logdir."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        return provider.ExperimentMetadata(data_location=self._logdir)

    def list_plugins(self, ctx=None, *, experiment_id):
        """Return the multiplexer's active plugin names."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        # Note: This result may include plugins that only have time
        # series with `DATA_CLASS_UNKNOWN`, which will not actually be
        # accessible via `list_*` or read_*`. This is inconsistent with
        # the specification for `list_plugins`, but the bug should be
        # mostly harmless.
        return self._multiplexer.ActivePlugins()

    def list_runs(self, ctx=None, *, experiment_id):
        """Return a list of `provider.Run` for every run in the multiplexer."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        return [
            provider.Run(
                run_id=run,  # use names as IDs
                run_name=run,
                start_time=self._get_first_event_timestamp(run),
            )
            for run in self._multiplexer.Runs()
        ]

    def list_scalars(
        self, ctx=None, *, experiment_id, plugin_name, run_tag_filter=None
    ):
        """List scalar time series as `d[run][tag] -> ScalarTimeSeries`."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_SCALAR
        )
        return self._list(provider.ScalarTimeSeries, index)

    def read_scalars(
        self,
        ctx=None,
        *,
        experiment_id,
        plugin_name,
        downsample=None,
        run_tag_filter=None,
    ):
        """Read scalar data as `d[run][tag] -> [ScalarDatum]`, downsampled."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        self._validate_downsample(downsample)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_SCALAR
        )
        return self._read(_convert_scalar_event, index, downsample)

    def read_last_scalars(
        self,
        ctx=None,
        *,
        experiment_id,
        plugin_name,
        run_tag_filter=None,
    ):
        """Read only the most recent scalar datum per matching time series.

        Returns:
          A `defaultdict` mapping `run -> tag -> ScalarDatum` for the last
          event of each series; series with no events are omitted.
        """
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_SCALAR
        )
        run_tag_to_last_scalar_datum = collections.defaultdict(dict)
        for run, tags_for_run in index.items():
            # NOTE(review): `metadata` is unused in this loop body.
            for tag, metadata in tags_for_run.items():
                events = self._multiplexer.Tensors(run, tag)
                if events:
                    run_tag_to_last_scalar_datum[run][tag] = (
                        _convert_scalar_event(events[-1])
                    )

        return run_tag_to_last_scalar_datum

    def list_tensors(
        self, ctx=None, *, experiment_id, plugin_name, run_tag_filter=None
    ):
        """List tensor time series as `d[run][tag] -> TensorTimeSeries`."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_TENSOR
        )
        return self._list(provider.TensorTimeSeries, index)

    def read_tensors(
        self,
        ctx=None,
        *,
        experiment_id,
        plugin_name,
        downsample=None,
        run_tag_filter=None,
    ):
        """Read tensor data as `d[run][tag] -> [TensorDatum]`, downsampled."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        self._validate_downsample(downsample)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_TENSOR
        )
        return self._read(_convert_tensor_event, index, downsample)

    def _index(self, plugin_name, run_tag_filter, data_class_filter):
        """List time series and metadata matching the given filters.

        This is like `_list`, but doesn't traverse `Tensors(...)` to
        compute metadata that's not always needed.

        Args:
          plugin_name: A string plugin name filter (required).
          run_tag_filter: An `provider.RunTagFilter`, or `None`.
          data_class_filter: A `summary_pb2.DataClass` filter (required).

        Returns:
          A nested dict `d` such that `d[run][tag]` is a
          `SummaryMetadata` proto.
        """
        if run_tag_filter is None:
            run_tag_filter = provider.RunTagFilter(runs=None, tags=None)
        runs = run_tag_filter.runs
        tags = run_tag_filter.tags

        # Optimization for a common case, reading a single time series.
        if runs and len(runs) == 1 and tags and len(tags) == 1:
            (run,) = runs
            (tag,) = tags
            try:
                metadata = self._multiplexer.SummaryMetadata(run, tag)
            except KeyError:
                # Unknown run/tag: nothing matches.
                return {}
            all_metadata = {run: {tag: metadata}}
        else:
            all_metadata = self._multiplexer.AllSummaryMetadata()

        result = {}
        for run, tag_to_metadata in all_metadata.items():
            if runs is not None and run not in runs:
                continue
            result_for_run = {}
            for tag, metadata in tag_to_metadata.items():
                if tags is not None and tag not in tags:
                    continue
                if metadata.data_class != data_class_filter:
                    continue
                if metadata.plugin_data.plugin_name != plugin_name:
                    continue
                # Only runs with at least one surviving tag appear in the
                # result (the assignment is reached only on a match).
                result[run] = result_for_run
                result_for_run[tag] = metadata

        return result

    def _list(self, construct_time_series, index):
        """Helper to list scalar or tensor time series.

        Args:
          construct_time_series: `ScalarTimeSeries` or `TensorTimeSeries`.
          index: The result of `self._index(...)`.

        Returns:
          A list of objects of type given by `construct_time_series`,
          suitable to be returned from `list_scalars` or `list_tensors`.
        """
        result = {}
        for run, tag_to_metadata in index.items():
            result_for_run = {}
            result[run] = result_for_run
            for tag, summary_metadata in tag_to_metadata.items():
                max_step = None
                max_wall_time = None
                # Scan all events to find the largest step and wall time.
                for event in self._multiplexer.Tensors(run, tag):
                    if max_step is None or max_step < event.step:
                        max_step = event.step
                    if max_wall_time is None or max_wall_time < event.wall_time:
                        max_wall_time = event.wall_time
                # NOTE(review): this refetch overwrites the metadata already
                # held in `index`; it looks redundant — confirm before removing.
                summary_metadata = self._multiplexer.SummaryMetadata(run, tag)
                result_for_run[tag] = construct_time_series(
                    max_step=max_step,
                    max_wall_time=max_wall_time,
                    plugin_content=summary_metadata.plugin_data.content,
                    description=summary_metadata.summary_description,
                    display_name=summary_metadata.display_name,
                )
        return result

    def _read(self, convert_event, index, downsample):
        """Helper to read scalar or tensor data from the multiplexer.

        Args:
          convert_event: Takes `plugin_event_accumulator.TensorEvent` to
            either `provider.ScalarDatum` or `provider.TensorDatum`.
          index: The result of `self._index(...)`.
          downsample: Non-negative `int`; how many samples to return per
            time series.

        Returns:
          A dict of dicts of values returned by `convert_event` calls,
          suitable to be returned from `read_scalars` or `read_tensors`.
        """
        result = {}
        for run, tags_for_run in index.items():
            result_for_run = {}
            result[run] = result_for_run
            for tag, metadata in tags_for_run.items():
                events = self._multiplexer.Tensors(run, tag)
                data = [convert_event(e) for e in events]
                result_for_run[tag] = _downsample(data, downsample)
        return result

    def list_blob_sequences(
        self, ctx=None, *, experiment_id, plugin_name, run_tag_filter=None
    ):
        """List blob-sequence series as `d[run][tag] -> BlobSequenceTimeSeries`."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_BLOB_SEQUENCE
        )
        result = {}
        for run, tag_to_metadata in index.items():
            result_for_run = {}
            result[run] = result_for_run
            for tag, metadata in tag_to_metadata.items():
                max_step = None
                max_wall_time = None
                max_length = None
                # Track max step/wall time, and the longest sequence length
                # (number of tensor elements) across all events.
                for event in self._multiplexer.Tensors(run, tag):
                    if max_step is None or max_step < event.step:
                        max_step = event.step
                    if max_wall_time is None or max_wall_time < event.wall_time:
                        max_wall_time = event.wall_time
                    length = _tensor_size(event.tensor_proto)
                    if max_length is None or length > max_length:
                        max_length = length
                result_for_run[tag] = provider.BlobSequenceTimeSeries(
                    max_step=max_step,
                    max_wall_time=max_wall_time,
                    max_length=max_length,
                    plugin_content=metadata.plugin_data.content,
                    description=metadata.summary_description,
                    display_name=metadata.display_name,
                )
        return result

    def read_blob_sequences(
        self,
        ctx=None,
        *,
        experiment_id,
        plugin_name,
        downsample=None,
        run_tag_filter=None,
    ):
        """Read blob-sequence data as `d[run][tag] -> [BlobSequenceDatum]`."""
        self._validate_context(ctx)
        self._validate_experiment_id(experiment_id)
        self._validate_downsample(downsample)
        index = self._index(
            plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_BLOB_SEQUENCE
        )
        result = {}
        for run, tags in index.items():
            result_for_run = {}
            result[run] = result_for_run
            for tag in tags:
                events = self._multiplexer.Tensors(run, tag)
                data_by_step = {}
                # Keep only the first event seen for each step.
                for event in events:
                    if event.step in data_by_step:
                        continue
                    data_by_step[event.step] = _convert_blob_sequence_event(
                        experiment_id, plugin_name, run, tag, event
                    )
                data = [datum for (step, datum) in sorted(data_by_step.items())]
                result_for_run[tag] = _downsample(data, downsample)
        return result

    def read_blob(self, ctx=None, *, blob_key):
        """Return one blob's contents, addressed by an encoded blob key.

        Raises:
          errors.NotFoundError: if the key does not name a blob-sequence
            series, or no event exists at the encoded step.
        """
        self._validate_context(ctx)
        # NOTE(review): `plugin_name` from the key is unused here.
        (
            unused_experiment_id,
            plugin_name,
            run,
            tag,
            step,
            index,
        ) = _decode_blob_key(blob_key)

        summary_metadata = self._multiplexer.SummaryMetadata(run, tag)
        if summary_metadata.data_class != summary_pb2.DATA_CLASS_BLOB_SEQUENCE:
            raise errors.NotFoundError(blob_key)
        tensor_events = self._multiplexer.Tensors(run, tag)
        # In case of multiple events at this step, take first (arbitrary).
        matching_step = next((e for e in tensor_events if e.step == step), None)
        if not matching_step:
            raise errors.NotFoundError("%s: no such step %r" % (blob_key, step))
        tensor = tensor_util.make_ndarray(matching_step.tensor_proto)
        return tensor[index]
384
+
385
+
386
+ # TODO(davidsoergel): deduplicate with other implementations
387
+ def _encode_blob_key(experiment_id, plugin_name, run, tag, step, index):
388
+ """Generate a blob key: a short, URL-safe string identifying a blob.
389
+
390
+ A blob can be located using a set of integer and string fields; here we
391
+ serialize these to allow passing the data through a URL. Specifically, we
392
+ 1) construct a tuple of the arguments in order; 2) represent that as an
393
+ ascii-encoded JSON string (without whitespace); and 3) take the URL-safe
394
+ base64 encoding of that, with no padding. For example:
395
+
396
+ 1) Tuple: ("some_id", "graphs", "train", "graph_def", 2, 0)
397
+ 2) JSON: ["some_id","graphs","train","graph_def",2,0]
398
+ 3) base64: WyJzb21lX2lkIiwiZ3JhcGhzIiwidHJhaW4iLCJncmFwaF9kZWYiLDIsMF0K
399
+
400
+ Args:
401
+ experiment_id: a string ID identifying an experiment.
402
+ plugin_name: string
403
+ run: string
404
+ tag: string
405
+ step: int
406
+ index: int
407
+
408
+ Returns:
409
+ A URL-safe base64-encoded string representing the provided arguments.
410
+ """
411
+ # Encodes the blob key as a URL-safe string, as required by the
412
+ # `BlobReference` API in `tensorboard/data/provider.py`, because these keys
413
+ # may be used to construct URLs for retrieving blobs.
414
+ stringified = json.dumps(
415
+ (experiment_id, plugin_name, run, tag, step, index),
416
+ separators=(",", ":"),
417
+ )
418
+ bytesified = stringified.encode("ascii")
419
+ encoded = base64.urlsafe_b64encode(bytesified)
420
+ return encoded.decode("ascii").rstrip("=")
421
+
422
+
423
+ # Any changes to this function need not be backward-compatible, even though
424
+ # the current encoding was used to generate URLs. The reason is that the
425
+ # generated URLs are not considered permalinks: they need to be valid only
426
+ # within the context of the session that created them (via the matching
427
+ # `_encode_blob_key` function above).
428
+ def _decode_blob_key(key):
429
+ """Decode a blob key produced by `_encode_blob_key` into component fields.
430
+
431
+ Args:
432
+ key: a blob key, as generated by `_encode_blob_key`.
433
+
434
+ Returns:
435
+ A tuple of `(experiment_id, plugin_name, run, tag, step, index)`, with types
436
+ matching the arguments of `_encode_blob_key`.
437
+ """
438
+ decoded = base64.urlsafe_b64decode(key + "==") # pad past a multiple of 4.
439
+ stringified = decoded.decode("ascii")
440
+ experiment_id, plugin_name, run, tag, step, index = json.loads(stringified)
441
+ return (experiment_id, plugin_name, run, tag, step, index)
442
+
443
+
444
def _convert_scalar_event(event):
    """Convert a `TensorEvent` into a `provider.ScalarDatum` (for `read_scalars`).

    The event's tensor is assumed to hold a single element, extracted via
    `.item()`.
    """
    scalar = tensor_util.make_ndarray(event.tensor_proto).item()
    return provider.ScalarDatum(
        step=event.step, wall_time=event.wall_time, value=scalar
    )
451
+
452
+
453
def _convert_tensor_event(event):
    """Convert a `TensorEvent` into a `provider.TensorDatum` (for `read_tensors`)."""
    array = tensor_util.make_ndarray(event.tensor_proto)
    return provider.TensorDatum(
        step=event.step, wall_time=event.wall_time, numpy=array
    )
460
+
461
+
462
def _convert_blob_sequence_event(experiment_id, plugin_name, run, tag, event):
    """Convert a `TensorEvent` into a `provider.BlobSequenceDatum`.

    One `BlobReference` is produced per element of the event's tensor; each
    reference carries an encoded key locating that element.
    """
    blob_count = _tensor_size(event.tensor_proto)
    references = tuple(
        provider.BlobReference(
            _encode_blob_key(
                experiment_id, plugin_name, run, tag, event.step, position
            )
        )
        for position in range(blob_count)
    )
    return provider.BlobSequenceDatum(
        wall_time=event.wall_time, step=event.step, values=references
    )
483
+
484
+
485
+ def _tensor_size(tensor_proto):
486
+ """Compute the number of elements in a tensor.
487
+
488
+ This does not deserialize the full tensor contents.
489
+
490
+ Args:
491
+ tensor_proto: A `tensorboard.compat.proto.tensor_pb2.TensorProto`.
492
+
493
+ Returns:
494
+ A non-negative `int`.
495
+ """
496
+ # This is the same logic that `tensor_util.make_ndarray` uses to
497
+ # compute the size, but without the actual buffer copies.
498
+ result = 1
499
+ for dim in tensor_proto.tensor_shape.dim:
500
+ result *= dim.size
501
+ return result
502
+
503
+
504
+ def _downsample(xs, k):
505
+ """Downsample `xs` to at most `k` elements.
506
+
507
+ If `k` is larger than `xs`, then the contents of `xs` itself will be
508
+ returned. If `k` is smaller than `xs`, the last element of `xs` will
509
+ always be included (unless `k` is `0`) and the preceding elements
510
+ will be selected uniformly at random.
511
+
512
+ This differs from `random.sample` in that it returns a subsequence
513
+ (i.e., order is preserved) and that it permits `k > len(xs)`.
514
+
515
+ The random number generator will always be `random.Random(0)`, so
516
+ this function is deterministic (within a Python process).
517
+
518
+ Args:
519
+ xs: A sequence (`collections.abc.Sequence`).
520
+ k: A non-negative integer.
521
+
522
+ Returns:
523
+ A new list whose elements are a subsequence of `xs` of length
524
+ `min(k, len(xs))` and that is guaranteed to include the last
525
+ element of `xs`, uniformly selected among such subsequences.
526
+ """
527
+
528
+ if k > len(xs):
529
+ return list(xs)
530
+ if k == 0:
531
+ return []
532
+ indices = random.Random(0).sample(range(len(xs) - 1), k - 1)
533
+ indices.sort()
534
+ indices += [len(xs) - 1]
535
+ return [xs[i] for i in indices]
@@ -0,0 +1,142 @@
1
+ # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ """Implementation for a multi-file directory loader."""
17
+
18
+ from tensorbored.backend.event_processing import directory_watcher
19
+ from tensorbored.backend.event_processing import io_wrapper
20
+ from tensorbored.compat import tf
21
+ from tensorbored.util import tb_logging
22
+
23
logger = tb_logging.get_logger()


# Sentinel object for an inactive path. Stored in `_max_timestamps` in
# place of a real timestamp and compared by identity (`is`), so a fresh
# `object()` can never collide with actual data.
_INACTIVE = object()
28
+
29
+
30
class DirectoryLoader:
    """Loader for an entire directory, maintaining multiple active file
    loaders.

    This class takes a directory, a factory for loaders, and optionally a
    path filter and watches all the paths inside that directory for new data.
    Each file loader created by the factory must read a path and produce an
    iterator of (timestamp, value) pairs.

    Unlike DirectoryWatcher, this class does not assume that only one file
    receives new data at a time; there can be arbitrarily many active files.
    However, any file whose maximum load timestamp fails an "active" predicate
    will be marked as inactive and no longer checked for new data.
    """

    def __init__(
        self,
        directory,
        loader_factory,
        path_filter=lambda x: True,
        active_filter=lambda timestamp: True,
    ):
        """Constructs a new DirectoryLoader.

        Args:
          directory: The directory to load files from.
          loader_factory: A factory for creating loaders. The factory should
            take a path and return an object that has a Load method returning
            an iterator yielding (unix timestamp as float, value) pairs for
            any new data.
          path_filter: If specified, only paths matching this filter are
            loaded.
          active_filter: If specified, any loader whose maximum load timestamp
            does not pass this filter will be marked as inactive and no
            longer read.

        Raises:
          ValueError: If directory or loader_factory are None.
        """
        if directory is None:
            raise ValueError("A directory is required")
        if loader_factory is None:
            raise ValueError("A loader factory is required")
        self._directory = directory
        self._loader_factory = loader_factory
        self._path_filter = path_filter
        self._active_filter = active_filter
        # Maps path -> loader for each path with an active loader.
        self._loaders = {}
        # Maps path -> maximum load timestamp seen so far, or the
        # `_INACTIVE` sentinel once the path has been retired.
        self._max_timestamps = {}

    def Load(self):
        """Loads new values from all active files.

        Yields:
          All values that have not been yielded yet.

        Raises:
          DirectoryDeletedError: If the directory has been permanently deleted
            (as opposed to being temporarily unavailable).
        """
        try:
            all_paths = io_wrapper.ListDirectoryAbsolute(self._directory)
            paths = sorted(p for p in all_paths if self._path_filter(p))
            for path in paths:
                for value in self._LoadPath(path):
                    yield value
        except tf.errors.OpError as e:
            if not tf.io.gfile.exists(self._directory):
                raise directory_watcher.DirectoryDeletedError(
                    "Directory %s has been permanently deleted"
                    % self._directory
                )
            else:
                # Best effort: transient filesystem errors should not abort
                # the whole load cycle.  (Lazy %-args keep formatting off the
                # hot path and match the logging style used elsewhere here.)
                logger.info("Ignoring error during file loading: %s", e)

    def _LoadPath(self, path):
        """Generator for values from a single path's loader.

        Args:
          path: The path to load from.

        Yields:
          All values from this path's loader that have not been yielded yet.
        """
        max_timestamp = self._max_timestamps.get(path, None)
        if max_timestamp is _INACTIVE or self._MarkIfInactive(
            path, max_timestamp
        ):
            logger.debug("Skipping inactive path %s", path)
            return
        loader = self._loaders.get(path, None)
        if loader is None:
            try:
                loader = self._loader_factory(path)
            except tf.errors.NotFoundError:
                # Happens if a file was removed after we listed the directory.
                logger.debug("Skipping nonexistent path %s", path)
                return
            self._loaders[path] = loader
        logger.info("Loading data from path %s", path)
        for timestamp, value in loader.Load():
            if max_timestamp is None or timestamp > max_timestamp:
                max_timestamp = timestamp
            yield value
        # Only record the new high-water mark if the path is still active;
        # otherwise `_MarkIfInactive` has already stored the sentinel.
        if not self._MarkIfInactive(path, max_timestamp):
            self._max_timestamps[path] = max_timestamp

    def _MarkIfInactive(self, path, max_timestamp):
        """If max_timestamp is inactive, returns True and marks the path as
        such."""
        logger.debug("Checking active status of %s at %s", path, max_timestamp)
        if max_timestamp is not None and not self._active_filter(max_timestamp):
            self._max_timestamps[path] = _INACTIVE
            # Drop the loader so its resources can be reclaimed; `pop` with a
            # default avoids a KeyError if no loader was ever registered.
            self._loaders.pop(path, None)
            return True
        return False