tensorbored 2.21.0rc1769983804__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (271) hide show
  1. tensorbored/__init__.py +112 -0
  2. tensorbored/_vendor/__init__.py +0 -0
  3. tensorbored/_vendor/bleach/__init__.py +125 -0
  4. tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
  5. tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
  6. tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
  7. tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
  8. tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
  9. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
  10. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
  11. tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
  12. tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
  13. tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
  14. tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
  15. tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
  16. tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
  17. tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
  18. tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
  19. tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
  20. tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
  21. tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
  22. tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
  23. tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
  24. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
  25. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
  26. tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
  27. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
  28. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
  29. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
  30. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
  31. tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
  32. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
  33. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
  34. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
  35. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
  36. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
  37. tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
  38. tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
  39. tensorbored/_vendor/bleach/callbacks.py +32 -0
  40. tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
  41. tensorbored/_vendor/bleach/linkifier.py +633 -0
  42. tensorbored/_vendor/bleach/parse_shim.py +1 -0
  43. tensorbored/_vendor/bleach/sanitizer.py +638 -0
  44. tensorbored/_vendor/bleach/six_shim.py +19 -0
  45. tensorbored/_vendor/webencodings/__init__.py +342 -0
  46. tensorbored/_vendor/webencodings/labels.py +231 -0
  47. tensorbored/_vendor/webencodings/mklabels.py +59 -0
  48. tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
  49. tensorbored/assets.py +36 -0
  50. tensorbored/auth.py +102 -0
  51. tensorbored/backend/__init__.py +0 -0
  52. tensorbored/backend/application.py +604 -0
  53. tensorbored/backend/auth_context_middleware.py +38 -0
  54. tensorbored/backend/client_feature_flags.py +113 -0
  55. tensorbored/backend/empty_path_redirect.py +46 -0
  56. tensorbored/backend/event_processing/__init__.py +0 -0
  57. tensorbored/backend/event_processing/data_ingester.py +276 -0
  58. tensorbored/backend/event_processing/data_provider.py +535 -0
  59. tensorbored/backend/event_processing/directory_loader.py +142 -0
  60. tensorbored/backend/event_processing/directory_watcher.py +272 -0
  61. tensorbored/backend/event_processing/event_accumulator.py +950 -0
  62. tensorbored/backend/event_processing/event_file_inspector.py +463 -0
  63. tensorbored/backend/event_processing/event_file_loader.py +292 -0
  64. tensorbored/backend/event_processing/event_multiplexer.py +521 -0
  65. tensorbored/backend/event_processing/event_util.py +68 -0
  66. tensorbored/backend/event_processing/io_wrapper.py +223 -0
  67. tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
  68. tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
  69. tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
  70. tensorbored/backend/event_processing/reservoir.py +266 -0
  71. tensorbored/backend/event_processing/tag_types.py +29 -0
  72. tensorbored/backend/experiment_id.py +71 -0
  73. tensorbored/backend/experimental_plugin.py +51 -0
  74. tensorbored/backend/http_util.py +263 -0
  75. tensorbored/backend/json_util.py +70 -0
  76. tensorbored/backend/path_prefix.py +67 -0
  77. tensorbored/backend/process_graph.py +74 -0
  78. tensorbored/backend/security_validator.py +202 -0
  79. tensorbored/compat/__init__.py +69 -0
  80. tensorbored/compat/proto/__init__.py +0 -0
  81. tensorbored/compat/proto/allocation_description_pb2.py +35 -0
  82. tensorbored/compat/proto/api_def_pb2.py +82 -0
  83. tensorbored/compat/proto/attr_value_pb2.py +80 -0
  84. tensorbored/compat/proto/cluster_pb2.py +58 -0
  85. tensorbored/compat/proto/config_pb2.py +271 -0
  86. tensorbored/compat/proto/coordination_config_pb2.py +45 -0
  87. tensorbored/compat/proto/cost_graph_pb2.py +87 -0
  88. tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
  89. tensorbored/compat/proto/debug_pb2.py +65 -0
  90. tensorbored/compat/proto/event_pb2.py +149 -0
  91. tensorbored/compat/proto/full_type_pb2.py +74 -0
  92. tensorbored/compat/proto/function_pb2.py +157 -0
  93. tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
  94. tensorbored/compat/proto/graph_pb2.py +41 -0
  95. tensorbored/compat/proto/histogram_pb2.py +39 -0
  96. tensorbored/compat/proto/meta_graph_pb2.py +254 -0
  97. tensorbored/compat/proto/node_def_pb2.py +61 -0
  98. tensorbored/compat/proto/op_def_pb2.py +81 -0
  99. tensorbored/compat/proto/resource_handle_pb2.py +48 -0
  100. tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
  101. tensorbored/compat/proto/rpc_options_pb2.py +35 -0
  102. tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
  103. tensorbored/compat/proto/saver_pb2.py +38 -0
  104. tensorbored/compat/proto/step_stats_pb2.py +116 -0
  105. tensorbored/compat/proto/struct_pb2.py +144 -0
  106. tensorbored/compat/proto/summary_pb2.py +111 -0
  107. tensorbored/compat/proto/tensor_description_pb2.py +38 -0
  108. tensorbored/compat/proto/tensor_pb2.py +68 -0
  109. tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
  110. tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
  111. tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
  112. tensorbored/compat/proto/types_pb2.py +105 -0
  113. tensorbored/compat/proto/variable_pb2.py +62 -0
  114. tensorbored/compat/proto/verifier_config_pb2.py +38 -0
  115. tensorbored/compat/proto/versions_pb2.py +35 -0
  116. tensorbored/compat/tensorflow_stub/__init__.py +38 -0
  117. tensorbored/compat/tensorflow_stub/app.py +124 -0
  118. tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
  119. tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
  120. tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
  121. tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
  122. tensorbored/compat/tensorflow_stub/errors.py +507 -0
  123. tensorbored/compat/tensorflow_stub/flags.py +124 -0
  124. tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
  125. tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
  126. tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
  127. tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
  128. tensorbored/context.py +129 -0
  129. tensorbored/data/__init__.py +0 -0
  130. tensorbored/data/grpc_provider.py +365 -0
  131. tensorbored/data/ingester.py +46 -0
  132. tensorbored/data/proto/__init__.py +0 -0
  133. tensorbored/data/proto/data_provider_pb2.py +517 -0
  134. tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
  135. tensorbored/data/provider.py +1365 -0
  136. tensorbored/data/server_ingester.py +301 -0
  137. tensorbored/data_compat.py +159 -0
  138. tensorbored/dataclass_compat.py +224 -0
  139. tensorbored/default.py +124 -0
  140. tensorbored/errors.py +130 -0
  141. tensorbored/lazy.py +99 -0
  142. tensorbored/main.py +48 -0
  143. tensorbored/main_lib.py +62 -0
  144. tensorbored/manager.py +487 -0
  145. tensorbored/notebook.py +441 -0
  146. tensorbored/plugin_util.py +266 -0
  147. tensorbored/plugins/__init__.py +0 -0
  148. tensorbored/plugins/audio/__init__.py +0 -0
  149. tensorbored/plugins/audio/audio_plugin.py +229 -0
  150. tensorbored/plugins/audio/metadata.py +69 -0
  151. tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
  152. tensorbored/plugins/audio/summary.py +230 -0
  153. tensorbored/plugins/audio/summary_v2.py +124 -0
  154. tensorbored/plugins/base_plugin.py +367 -0
  155. tensorbored/plugins/core/__init__.py +0 -0
  156. tensorbored/plugins/core/core_plugin.py +981 -0
  157. tensorbored/plugins/custom_scalar/__init__.py +0 -0
  158. tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
  159. tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
  160. tensorbored/plugins/custom_scalar/metadata.py +35 -0
  161. tensorbored/plugins/custom_scalar/summary.py +79 -0
  162. tensorbored/plugins/debugger_v2/__init__.py +0 -0
  163. tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
  164. tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
  165. tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
  166. tensorbored/plugins/distribution/__init__.py +0 -0
  167. tensorbored/plugins/distribution/compressor.py +158 -0
  168. tensorbored/plugins/distribution/distributions_plugin.py +116 -0
  169. tensorbored/plugins/distribution/metadata.py +19 -0
  170. tensorbored/plugins/graph/__init__.py +0 -0
  171. tensorbored/plugins/graph/graph_util.py +129 -0
  172. tensorbored/plugins/graph/graphs_plugin.py +336 -0
  173. tensorbored/plugins/graph/keras_util.py +328 -0
  174. tensorbored/plugins/graph/metadata.py +42 -0
  175. tensorbored/plugins/histogram/__init__.py +0 -0
  176. tensorbored/plugins/histogram/histograms_plugin.py +144 -0
  177. tensorbored/plugins/histogram/metadata.py +63 -0
  178. tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
  179. tensorbored/plugins/histogram/summary.py +234 -0
  180. tensorbored/plugins/histogram/summary_v2.py +292 -0
  181. tensorbored/plugins/hparams/__init__.py +14 -0
  182. tensorbored/plugins/hparams/_keras.py +93 -0
  183. tensorbored/plugins/hparams/api.py +130 -0
  184. tensorbored/plugins/hparams/api_pb2.py +208 -0
  185. tensorbored/plugins/hparams/backend_context.py +606 -0
  186. tensorbored/plugins/hparams/download_data.py +158 -0
  187. tensorbored/plugins/hparams/error.py +26 -0
  188. tensorbored/plugins/hparams/get_experiment.py +71 -0
  189. tensorbored/plugins/hparams/hparams_plugin.py +206 -0
  190. tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
  191. tensorbored/plugins/hparams/json_format_compat.py +38 -0
  192. tensorbored/plugins/hparams/list_metric_evals.py +57 -0
  193. tensorbored/plugins/hparams/list_session_groups.py +1040 -0
  194. tensorbored/plugins/hparams/metadata.py +125 -0
  195. tensorbored/plugins/hparams/metrics.py +41 -0
  196. tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
  197. tensorbored/plugins/hparams/summary.py +205 -0
  198. tensorbored/plugins/hparams/summary_v2.py +597 -0
  199. tensorbored/plugins/image/__init__.py +0 -0
  200. tensorbored/plugins/image/images_plugin.py +232 -0
  201. tensorbored/plugins/image/metadata.py +65 -0
  202. tensorbored/plugins/image/plugin_data_pb2.py +34 -0
  203. tensorbored/plugins/image/summary.py +159 -0
  204. tensorbored/plugins/image/summary_v2.py +130 -0
  205. tensorbored/plugins/mesh/__init__.py +14 -0
  206. tensorbored/plugins/mesh/mesh_plugin.py +292 -0
  207. tensorbored/plugins/mesh/metadata.py +152 -0
  208. tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
  209. tensorbored/plugins/mesh/summary.py +251 -0
  210. tensorbored/plugins/mesh/summary_v2.py +214 -0
  211. tensorbored/plugins/metrics/__init__.py +0 -0
  212. tensorbored/plugins/metrics/metadata.py +17 -0
  213. tensorbored/plugins/metrics/metrics_plugin.py +623 -0
  214. tensorbored/plugins/pr_curve/__init__.py +0 -0
  215. tensorbored/plugins/pr_curve/metadata.py +75 -0
  216. tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
  217. tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
  218. tensorbored/plugins/pr_curve/summary.py +574 -0
  219. tensorbored/plugins/profile_redirect/__init__.py +0 -0
  220. tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
  221. tensorbored/plugins/projector/__init__.py +67 -0
  222. tensorbored/plugins/projector/metadata.py +26 -0
  223. tensorbored/plugins/projector/projector_config_pb2.py +54 -0
  224. tensorbored/plugins/projector/projector_plugin.py +795 -0
  225. tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
  226. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
  227. tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
  228. tensorbored/plugins/scalar/__init__.py +0 -0
  229. tensorbored/plugins/scalar/metadata.py +60 -0
  230. tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
  231. tensorbored/plugins/scalar/scalars_plugin.py +181 -0
  232. tensorbored/plugins/scalar/summary.py +109 -0
  233. tensorbored/plugins/scalar/summary_v2.py +124 -0
  234. tensorbored/plugins/text/__init__.py +0 -0
  235. tensorbored/plugins/text/metadata.py +62 -0
  236. tensorbored/plugins/text/plugin_data_pb2.py +34 -0
  237. tensorbored/plugins/text/summary.py +114 -0
  238. tensorbored/plugins/text/summary_v2.py +124 -0
  239. tensorbored/plugins/text/text_plugin.py +288 -0
  240. tensorbored/plugins/wit_redirect/__init__.py +0 -0
  241. tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
  242. tensorbored/program.py +910 -0
  243. tensorbored/summary/__init__.py +35 -0
  244. tensorbored/summary/_output.py +124 -0
  245. tensorbored/summary/_tf/__init__.py +14 -0
  246. tensorbored/summary/_tf/summary/__init__.py +178 -0
  247. tensorbored/summary/_writer.py +105 -0
  248. tensorbored/summary/v1.py +51 -0
  249. tensorbored/summary/v2.py +25 -0
  250. tensorbored/summary/writer/__init__.py +13 -0
  251. tensorbored/summary/writer/event_file_writer.py +291 -0
  252. tensorbored/summary/writer/record_writer.py +50 -0
  253. tensorbored/util/__init__.py +0 -0
  254. tensorbored/util/encoder.py +116 -0
  255. tensorbored/util/grpc_util.py +311 -0
  256. tensorbored/util/img_mime_type_detector.py +40 -0
  257. tensorbored/util/io_util.py +20 -0
  258. tensorbored/util/lazy_tensor_creator.py +110 -0
  259. tensorbored/util/op_evaluator.py +104 -0
  260. tensorbored/util/platform_util.py +20 -0
  261. tensorbored/util/tb_logging.py +24 -0
  262. tensorbored/util/tensor_util.py +617 -0
  263. tensorbored/util/timing.py +122 -0
  264. tensorbored/version.py +21 -0
  265. tensorbored/webfiles.zip +0 -0
  266. tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
  267. tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
  268. tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
  269. tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
  270. tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
  271. tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
@@ -0,0 +1,504 @@
1
+ # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """The TensorBoard Debugger V2 plugin."""
16
+
17
+ import threading
18
+
19
+ from werkzeug import wrappers
20
+
21
+ from tensorbored import errors
22
+ from tensorbored import plugin_util
23
+ from tensorbored.plugins import base_plugin
24
+ from tensorbored.plugins.debugger_v2 import debug_data_provider
25
+ from tensorbored.backend import http_util
26
+
27
+
28
def _error_response(request, error_message):
    """Respond to `request` with a JSON error body and HTTP status 400.

    Args:
      request: The werkzeug HTTP request being answered.
      error_message: Human-readable description of the failure; placed
        under the "error" key of the JSON response body.

    Returns:
      A werkzeug response with content type application/json.
    """
    body = {"error": error_message}
    return http_util.Respond(request, body, "application/json", code=400)
35
+
36
+
37
def _missing_run_error_response(request):
    """Build the standard 400 response for a request lacking the `run` arg."""
    message = "run parameter is not provided"
    return _error_response(request, message)
39
+
40
+
41
class DebuggerV2Plugin(base_plugin.TBPlugin):
    """Debugger V2 Plugin for TensorBoard.

    All data-serving routes follow the same pattern: build a run-tag
    filter describing the requested data, read the single blob it
    selects from the data provider, and serve the blob as JSON. That
    shared pattern lives in `_serve_blob_as_json()`; each route only
    parses its own query parameters.
    """

    plugin_name = debug_data_provider.PLUGIN_NAME

    def __init__(self, context):
        """Instantiates Debugger V2 Plugin via TensorBoard core.

        Args:
          context: A base_plugin.TBContext instance.
        """
        super().__init__(context)
        self._logdir = context.logdir
        self._underlying_data_provider = None
        # Held while initializing `_underlying_data_provider` for the first
        # time, to make sure that we only construct one.
        self._data_provider_init_lock = threading.Lock()

    @property
    def _data_provider(self):
        """Lazily constructs the underlying data provider, exactly once."""
        if self._underlying_data_provider is not None:
            return self._underlying_data_provider
        with self._data_provider_init_lock:
            # Re-check under the lock: another thread may have finished
            # construction while we were waiting.
            if self._underlying_data_provider is not None:
                return self._underlying_data_provider
            # TODO(cais): Implement factory for DataProvider that takes into
            # account the settings.
            dp = debug_data_provider.LocalDebuggerV2DataProvider(self._logdir)
            self._underlying_data_provider = dp
            return dp

    def get_plugin_apps(self):
        """Returns the routes served by this plugin."""
        # TODO(cais): Add routes as they are implemented.
        return {
            "/runs": self.serve_runs,
            "/alerts": self.serve_alerts,
            "/execution/digests": self.serve_execution_digests,
            "/execution/data": self.serve_execution_data,
            "/graph_execution/digests": self.serve_graph_execution_digests,
            "/graph_execution/data": self.serve_graph_execution_data,
            "/graphs/graph_info": self.serve_graph_info,
            "/graphs/op_info": self.serve_graph_op_info,
            "/source_files/list": self.serve_source_files_list,
            "/source_files/file": self.serve_source_file,
            "/stack_frames/stack_frames": self.serve_stack_frames,
        }

    def is_active(self):
        """The Debugger V2 plugin must be manually selected."""
        return False

    def frontend_metadata(self):
        """Describes how the frontend should surface this plugin."""
        return base_plugin.FrontendMetadata(
            is_ng_component=True, tab_name="Debugger V2", disable_reload=False
        )

    @staticmethod
    def _begin_end(request):
        """Parses the `begin` and `end` integer query args with defaults.

        Args:
          request: HTTP request.

        Returns:
          A `(begin, end)` pair of ints; defaults are `(0, -1)` when the
          corresponding query arguments are absent.
        """
        return (
            int(request.args.get("begin", "0")),
            int(request.args.get("end", "-1")),
        )

    def _serve_blob_as_json(self, request, run, run_tag_filter, error_types=()):
        """Reads the single blob selected by `run_tag_filter`; serves it as JSON.

        This is the shared implementation behind the data-serving routes.

        Args:
          request: HTTP request.
          run: Name of the run (already validated as non-None by the caller).
          run_tag_filter: Run-tag filter with exactly one tag, selecting the
            blob sequence to read; the first blob of that sequence is served.
          error_types: Exception class (or tuple of classes) from the data
            provider that should be translated into a 400 JSON error
            response. The default empty tuple catches nothing, i.e.
            provider errors propagate.

        Returns:
          Response to the request.
        """
        experiment = plugin_util.experiment_id(request.environ)
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        # Each filter is constructed with exactly one tag; extract it.
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except error_types as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_runs(self, request):
        """Serves the listing of debugger runs and their start times."""
        experiment = plugin_util.experiment_id(request.environ)
        runs = self._data_provider.list_runs(experiment_id=experiment)
        run_listing = dict()
        for run in runs:
            run_listing[run.run_id] = {"start_time": run.start_time}
        return http_util.Respond(request, run_listing, "application/json")

    @wrappers.Request.application
    def serve_alerts(self, request):
        """Serves alerts (e.g., NaN/Inf events), optionally filtered by type."""
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin, end = self._begin_end(request)
        alert_type = request.args.get("alert_type", None)
        run_tag_filter = debug_data_provider.alerts_run_tag_filter(
            run, begin, end, alert_type=alert_type
        )
        return self._serve_blob_as_json(
            request,
            run,
            run_tag_filter,
            error_types=errors.InvalidArgumentError,
        )

    @wrappers.Request.application
    def serve_execution_digests(self, request):
        """Serves digests of top-level (eager) execution events."""
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin, end = self._begin_end(request)
        run_tag_filter = debug_data_provider.execution_digest_run_tag_filter(
            run, begin, end
        )
        return self._serve_blob_as_json(
            request,
            run,
            run_tag_filter,
            error_types=errors.InvalidArgumentError,
        )

    @wrappers.Request.application
    def serve_execution_data(self, request):
        """Serves detailed data of top-level (eager) execution events."""
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin, end = self._begin_end(request)
        run_tag_filter = debug_data_provider.execution_data_run_tag_filter(
            run, begin, end
        )
        return self._serve_blob_as_json(
            request,
            run,
            run_tag_filter,
            error_types=errors.InvalidArgumentError,
        )

    @wrappers.Request.application
    def serve_graph_execution_digests(self, request):
        """Serve digests of intra-graph execution events.

        As the names imply, this route differs from `serve_execution_digests()`
        in that it is for intra-graph execution, while `serve_execution_digests()`
        is for top-level (eager) execution.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin, end = self._begin_end(request)
        run_tag_filter = (
            debug_data_provider.graph_execution_digest_run_tag_filter(
                run, begin, end
            )
        )
        return self._serve_blob_as_json(
            request,
            run,
            run_tag_filter,
            error_types=errors.InvalidArgumentError,
        )

    @wrappers.Request.application
    def serve_graph_execution_data(self, request):
        """Serve detailed data objects of intra-graph execution events.

        As the names imply, this route differs from `serve_execution_data()`
        in that it is for intra-graph execution, while `serve_execution_data()`
        is for top-level (eager) execution.

        Unlike `serve_graph_execution_digests()`, this method serves the
        full-sized data objects for intra-graph execution events.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin, end = self._begin_end(request)
        run_tag_filter = (
            debug_data_provider.graph_execution_data_run_tag_filter(
                run, begin, end
            )
        )
        return self._serve_blob_as_json(
            request,
            run,
            run_tag_filter,
            error_types=errors.InvalidArgumentError,
        )

    @wrappers.Request.application
    def serve_graph_info(self, request):
        """Serve basic information about a TensorFlow graph.

        The request specifies the debugger-generated ID of the graph being
        queried.

        The response contains a JSON object with the following fields:
        - graph_id: The debugger-generated ID (echoing the request).
        - name: The name of the graph (if any). For TensorFlow 2.x
          Function Graphs (FuncGraphs), this is typically the name of
          the underlying Python function, optionally prefixed with
          TensorFlow-generated prefixes such as "__inference_".
          Some graphs (e.g., certain outermost graphs) may have no names,
          in which case this field is `null`.
        - outer_graph_id: Outer graph ID (if any). For an outermost graph
          without an outer graph context, this field is `null`.
        - inner_graph_ids: Debugger-generated IDs of all the graphs
          nested inside this graph. For a graph without any graphs nested
          inside, this field is an empty array.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        graph_id = request.args.get("graph_id")
        run_tag_filter = debug_data_provider.graph_info_run_tag_filter(
            run, graph_id
        )
        return self._serve_blob_as_json(
            request, run, run_tag_filter, error_types=errors.NotFoundError
        )

    @wrappers.Request.application
    def serve_graph_op_info(self, request):
        """Serve information for ops in graphs.

        The request specifies the op name and the ID of the graph that
        contains the op.

        The response contains a JSON object with the following fields:
        - op_type
        - op_name
        - graph_ids: Stack of graph IDs that the op is located in, from
          outermost to innermost. The length of this array is always >= 1.
          The length is 1 if and only if the graph is an outermost graph.
        - num_outputs: Number of output tensors.
        - output_tensor_ids: The debugger-generated number IDs for the
          symbolic output tensors of the op (an array of numbers).
        - host_name: Name of the host on which the op is created.
        - stack_trace: Stack frames of the op's creation.
        - inputs: Specifications of all inputs to this op.
          Currently only immediate (one level of) inputs are provided.
          This is an array of length N_in, where N_in is the number of
          data inputs received by the op. Each element of the array is an
          object with the following fields:
          - op_name: Name of the op that provides the input tensor.
          - output_slot: 0-based output slot index from which the input
            tensor emits.
          - data: A recursive data structure of this same schema.
            This field is not populated (undefined) at the leaf nodes
            of this recursive data structure.
            In the rare case wherein the data for an input cannot be
            retrieved properly (e.g., special internal op types), this
            field will be unpopulated.
          This is an empty list for an op with no inputs.
        - consumers: Specifications for all the downstream consuming ops of
          this. Currently only immediate (one level of) consumers are provided.
          This is an array of length N_out, where N_out is the number of
          symbolic tensors output by this op.
          Each element of the array is an array of which the length equals
          the number of downstream ops that consume the corresponding symbolic
          tensor (only data edges are tracked).
          Each element of the array is an object with the following fields:
          - op_name: Name of the op that receives the output tensor as an
            input.
          - input_slot: 0-based input slot index at which the downstream
            op receives this output tensor.
          - data: A recursive data structure of this very schema.
            This field is not populated (undefined) at the leaf nodes
            of this recursive data structure.
            In the rare case wherein the data for a consumer op cannot be
            retrieved properly (e.g., special internal op types), this
            field will be unpopulated.
          If this op has no output tensors, this is an empty array.
          If one of the output tensors of this op has no consumers, the
          corresponding element is an empty array.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        graph_id = request.args.get("graph_id")
        op_name = request.args.get("op_name")
        run_tag_filter = debug_data_provider.graph_op_info_run_tag_filter(
            run, graph_id, op_name
        )
        return self._serve_blob_as_json(
            request, run, run_tag_filter, error_types=errors.NotFoundError
        )

    @wrappers.Request.application
    def serve_source_files_list(self, request):
        """Serves a list of all source files involved in the debugged program."""
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        run_tag_filter = debug_data_provider.source_file_list_run_tag_filter(
            run
        )
        # NOTE: no error_types here — provider errors propagate, matching the
        # historical behavior of this route (it had no try/except).
        return self._serve_blob_as_json(request, run, run_tag_filter)

    @wrappers.Request.application
    def serve_source_file(self, request):
        """Serves the content of a given source file.

        The source file is referred to by the index in the list of all source
        files involved in the execution of the debugged program, which is
        available via the `serve_source_files_list()` serving route.

        Args:
          request: HTTP request.

        Returns:
          Response to the request.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        index = request.args.get("index")
        # TODO(cais): When the need arises, support serving a subset of a
        # source file's lines.
        if index is None:
            return _error_response(
                request, "index is not provided for source file content"
            )
        index = int(index)
        run_tag_filter = debug_data_provider.source_file_run_tag_filter(
            run, index
        )
        return self._serve_blob_as_json(
            request, run, run_tag_filter, error_types=errors.NotFoundError
        )

    @wrappers.Request.application
    def serve_stack_frames(self, request):
        """Serves the content of stack frames.

        The stack frames being requested are referred to by UUIDs for each of
        them, separated by commas.

        Args:
          request: HTTP request.

        Returns:
          Response to the request.
        """
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        stack_frame_ids = request.args.get("stack_frame_ids")
        if stack_frame_ids is None:
            return _error_response(request, "Missing stack_frame_ids parameter")
        if not stack_frame_ids:
            return _error_response(request, "Empty stack_frame_ids parameter")
        stack_frame_ids = stack_frame_ids.split(",")
        run_tag_filter = debug_data_provider.stack_frames_run_tag_filter(
            run, stack_frame_ids
        )
        return self._serve_blob_as_json(
            request, run, run_tag_filter, error_types=errors.NotFoundError
        )
File without changes
@@ -0,0 +1,158 @@
1
+ # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+ """Package for histogram compression."""
16
+
17
+ import dataclasses
18
+ import numpy as np
19
+
20
+ from typing import Tuple
21
+
22
# Normal CDF for std_devs: (-Inf, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, Inf)
# naturally gives bands around median of width 1 std dev, 2 std dev, 3 std dev,
# and then the long tail.
# Values are basis points (1/100th of a percent) of the cumulative normal
# distribution at those standard-deviation cut points, so they span 0..10000.
NORMAL_HISTOGRAM_BPS = (0, 668, 1587, 3085, 5000, 6915, 8413, 9332, 10000)
26
+
27
+
28
@dataclasses.dataclass(frozen=True)
class CompressedHistogramValue:
    """A single compression point of a compressed histogram.

    Attributes:
      basis_point: Compression point represented in basis point, 1/100th of a
        percent.
      value: Cumulative weight at the basis point.
    """

    basis_point: float
    value: float

    def as_tuple(self) -> Tuple[float, float]:
        """Returns `(basis_point, value)` as a plain tuple."""
        return self.basis_point, self.value
44
+
45
+
46
# TODO(@jart): Unfork these methods.
def compress_histogram_proto(histo, bps=NORMAL_HISTOGRAM_BPS):
    """Creates fixed size histogram by adding compression to accumulated state.

    Transforms a histogram at a particular step by interpolating its variable
    number of buckets to represent their cumulative weight at a constant
    number of compression points. This significantly reduces the size of the
    histogram and makes it suitable for a two-dimensional area plot where the
    output of this routine constitutes the ranges for a single x coordinate.

    Args:
      histo: A HistogramProto object.
      bps: Compression points represented in basis points, 1/100ths of a
        percent. Defaults to normal distribution.

    Returns:
      List of values for each basis point.
    """
    # See also: Histogram::Percentile() in core/lib/histogram/histogram.cc
    if not histo.num:
        # Empty histogram: every compression point carries zero weight.
        return [CompressedHistogramValue(b, 0.0).as_tuple() for b in bps]
    counts = np.array(histo.bucket)
    limits = list(histo.bucket_limit)
    # Cumulative weights rescaled so the final entry equals bps[-1].
    cum_weights = (counts * bps[-1] / (counts.sum() or 1.0)).cumsum()
    output = []
    bp_idx = 0
    num_bps = len(bps)
    while bp_idx < num_bps:
        pos = np.searchsorted(cum_weights, bps[bp_idx], side="right")
        advanced = False
        while pos < len(cum_weights):
            hi = cum_weights[pos]
            lo = cum_weights[pos - 1] if pos > 0 else 0.0
            if hi == lo:  # prevent lerp divide by zero
                pos += 1
                continue
            if not pos or not lo:
                left = histo.min
            else:
                left = max(limits[pos - 1], histo.min)
            right = min(limits[pos], histo.max)
            interpolated = _lerp(bps[bp_idx], lo, hi, left, right)
            output.append(
                CompressedHistogramValue(bps[bp_idx], interpolated).as_tuple()
            )
            bp_idx += 1
            advanced = True
            break
        if not advanced:
            # Ran off the end of the buckets; saturate remaining points below.
            break
    while bp_idx < num_bps:
        output.append(
            CompressedHistogramValue(bps[bp_idx], histo.max).as_tuple()
        )
        bp_idx += 1
    return output
95
+
96
+
97
def compress_histogram(buckets, bps=NORMAL_HISTOGRAM_BPS):
    """Creates fixed size histogram by adding compression to accumulated state.

    Transforms a histogram at a particular step by linearly interpolating its
    variable number of buckets to represent their cumulative weight at a
    constant number of compression points. This significantly reduces the size
    of the histogram and makes it suitable for a two-dimensional area plot
    where the output of this routine constitutes the ranges for a single x
    coordinate.

    Args:
      buckets: A list of buckets, each of which is a 3-tuple of the form
        `(min, max, count)`.
      bps: Compression points represented in basis points, 1/100ths of a
        percent. Defaults to normal distribution.

    Returns:
      List of values for each basis point.
    """
    # See also: Histogram::Percentile() in core/lib/histogram/histogram.cc
    arr = np.array(buckets)
    if not arr.size:
        # No buckets at all: every compression point carries zero weight.
        return [CompressedHistogramValue(b, 0.0).as_tuple() for b in bps]
    global_min = arr[0][0]
    global_max = arr[-1][1]
    counts = arr[:, 2]
    right_edges = list(arr[:, 1])
    # Cumulative weights rescaled so the final entry equals bps[-1].
    cum_weights = (counts * bps[-1] / (counts.sum() or 1.0)).cumsum()

    output = []
    bp_idx = 0
    num_bps = len(bps)
    while bp_idx < num_bps:
        pos = np.searchsorted(cum_weights, bps[bp_idx], side="right")
        advanced = False
        while pos < len(cum_weights):
            hi = cum_weights[pos]
            lo = cum_weights[pos - 1] if pos > 0 else 0.0
            if hi == lo:  # prevent division-by-zero in `_lerp`
                pos += 1
                continue
            if not pos or not lo:
                left = global_min
            else:
                left = max(right_edges[pos - 1], global_min)
            right = min(right_edges[pos], global_max)
            interpolated = _lerp(bps[bp_idx], lo, hi, left, right)
            output.append(
                CompressedHistogramValue(bps[bp_idx], interpolated).as_tuple()
            )
            bp_idx += 1
            advanced = True
            break
        if not advanced:
            # Ran off the end of the buckets; saturate remaining points below.
            break
    while bp_idx < num_bps:
        output.append(
            CompressedHistogramValue(bps[bp_idx], global_max).as_tuple()
        )
        bp_idx += 1
    return output
154
+
155
+
156
+ def _lerp(x, x0, x1, y0, y1):
157
+ """Affinely map from [x0, x1] onto [y0, y1]."""
158
+ return y0 + (x - x0) * float(y1 - y0) / (x1 - x0)