tensorbored-2.21.0rc1769983804-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorbored/__init__.py +112 -0
- tensorbored/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/__init__.py +125 -0
- tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
- tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
- tensorbored/_vendor/bleach/callbacks.py +32 -0
- tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
- tensorbored/_vendor/bleach/linkifier.py +633 -0
- tensorbored/_vendor/bleach/parse_shim.py +1 -0
- tensorbored/_vendor/bleach/sanitizer.py +638 -0
- tensorbored/_vendor/bleach/six_shim.py +19 -0
- tensorbored/_vendor/webencodings/__init__.py +342 -0
- tensorbored/_vendor/webencodings/labels.py +231 -0
- tensorbored/_vendor/webencodings/mklabels.py +59 -0
- tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
- tensorbored/assets.py +36 -0
- tensorbored/auth.py +102 -0
- tensorbored/backend/__init__.py +0 -0
- tensorbored/backend/application.py +604 -0
- tensorbored/backend/auth_context_middleware.py +38 -0
- tensorbored/backend/client_feature_flags.py +113 -0
- tensorbored/backend/empty_path_redirect.py +46 -0
- tensorbored/backend/event_processing/__init__.py +0 -0
- tensorbored/backend/event_processing/data_ingester.py +276 -0
- tensorbored/backend/event_processing/data_provider.py +535 -0
- tensorbored/backend/event_processing/directory_loader.py +142 -0
- tensorbored/backend/event_processing/directory_watcher.py +272 -0
- tensorbored/backend/event_processing/event_accumulator.py +950 -0
- tensorbored/backend/event_processing/event_file_inspector.py +463 -0
- tensorbored/backend/event_processing/event_file_loader.py +292 -0
- tensorbored/backend/event_processing/event_multiplexer.py +521 -0
- tensorbored/backend/event_processing/event_util.py +68 -0
- tensorbored/backend/event_processing/io_wrapper.py +223 -0
- tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
- tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
- tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
- tensorbored/backend/event_processing/reservoir.py +266 -0
- tensorbored/backend/event_processing/tag_types.py +29 -0
- tensorbored/backend/experiment_id.py +71 -0
- tensorbored/backend/experimental_plugin.py +51 -0
- tensorbored/backend/http_util.py +263 -0
- tensorbored/backend/json_util.py +70 -0
- tensorbored/backend/path_prefix.py +67 -0
- tensorbored/backend/process_graph.py +74 -0
- tensorbored/backend/security_validator.py +202 -0
- tensorbored/compat/__init__.py +69 -0
- tensorbored/compat/proto/__init__.py +0 -0
- tensorbored/compat/proto/allocation_description_pb2.py +35 -0
- tensorbored/compat/proto/api_def_pb2.py +82 -0
- tensorbored/compat/proto/attr_value_pb2.py +80 -0
- tensorbored/compat/proto/cluster_pb2.py +58 -0
- tensorbored/compat/proto/config_pb2.py +271 -0
- tensorbored/compat/proto/coordination_config_pb2.py +45 -0
- tensorbored/compat/proto/cost_graph_pb2.py +87 -0
- tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
- tensorbored/compat/proto/debug_pb2.py +65 -0
- tensorbored/compat/proto/event_pb2.py +149 -0
- tensorbored/compat/proto/full_type_pb2.py +74 -0
- tensorbored/compat/proto/function_pb2.py +157 -0
- tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
- tensorbored/compat/proto/graph_pb2.py +41 -0
- tensorbored/compat/proto/histogram_pb2.py +39 -0
- tensorbored/compat/proto/meta_graph_pb2.py +254 -0
- tensorbored/compat/proto/node_def_pb2.py +61 -0
- tensorbored/compat/proto/op_def_pb2.py +81 -0
- tensorbored/compat/proto/resource_handle_pb2.py +48 -0
- tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
- tensorbored/compat/proto/rpc_options_pb2.py +35 -0
- tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
- tensorbored/compat/proto/saver_pb2.py +38 -0
- tensorbored/compat/proto/step_stats_pb2.py +116 -0
- tensorbored/compat/proto/struct_pb2.py +144 -0
- tensorbored/compat/proto/summary_pb2.py +111 -0
- tensorbored/compat/proto/tensor_description_pb2.py +38 -0
- tensorbored/compat/proto/tensor_pb2.py +68 -0
- tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
- tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
- tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
- tensorbored/compat/proto/types_pb2.py +105 -0
- tensorbored/compat/proto/variable_pb2.py +62 -0
- tensorbored/compat/proto/verifier_config_pb2.py +38 -0
- tensorbored/compat/proto/versions_pb2.py +35 -0
- tensorbored/compat/tensorflow_stub/__init__.py +38 -0
- tensorbored/compat/tensorflow_stub/app.py +124 -0
- tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
- tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
- tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
- tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
- tensorbored/compat/tensorflow_stub/errors.py +507 -0
- tensorbored/compat/tensorflow_stub/flags.py +124 -0
- tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
- tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
- tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
- tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
- tensorbored/context.py +129 -0
- tensorbored/data/__init__.py +0 -0
- tensorbored/data/grpc_provider.py +365 -0
- tensorbored/data/ingester.py +46 -0
- tensorbored/data/proto/__init__.py +0 -0
- tensorbored/data/proto/data_provider_pb2.py +517 -0
- tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
- tensorbored/data/provider.py +1365 -0
- tensorbored/data/server_ingester.py +301 -0
- tensorbored/data_compat.py +159 -0
- tensorbored/dataclass_compat.py +224 -0
- tensorbored/default.py +124 -0
- tensorbored/errors.py +130 -0
- tensorbored/lazy.py +99 -0
- tensorbored/main.py +48 -0
- tensorbored/main_lib.py +62 -0
- tensorbored/manager.py +487 -0
- tensorbored/notebook.py +441 -0
- tensorbored/plugin_util.py +266 -0
- tensorbored/plugins/__init__.py +0 -0
- tensorbored/plugins/audio/__init__.py +0 -0
- tensorbored/plugins/audio/audio_plugin.py +229 -0
- tensorbored/plugins/audio/metadata.py +69 -0
- tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
- tensorbored/plugins/audio/summary.py +230 -0
- tensorbored/plugins/audio/summary_v2.py +124 -0
- tensorbored/plugins/base_plugin.py +367 -0
- tensorbored/plugins/core/__init__.py +0 -0
- tensorbored/plugins/core/core_plugin.py +981 -0
- tensorbored/plugins/custom_scalar/__init__.py +0 -0
- tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
- tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
- tensorbored/plugins/custom_scalar/metadata.py +35 -0
- tensorbored/plugins/custom_scalar/summary.py +79 -0
- tensorbored/plugins/debugger_v2/__init__.py +0 -0
- tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
- tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
- tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
- tensorbored/plugins/distribution/__init__.py +0 -0
- tensorbored/plugins/distribution/compressor.py +158 -0
- tensorbored/plugins/distribution/distributions_plugin.py +116 -0
- tensorbored/plugins/distribution/metadata.py +19 -0
- tensorbored/plugins/graph/__init__.py +0 -0
- tensorbored/plugins/graph/graph_util.py +129 -0
- tensorbored/plugins/graph/graphs_plugin.py +336 -0
- tensorbored/plugins/graph/keras_util.py +328 -0
- tensorbored/plugins/graph/metadata.py +42 -0
- tensorbored/plugins/histogram/__init__.py +0 -0
- tensorbored/plugins/histogram/histograms_plugin.py +144 -0
- tensorbored/plugins/histogram/metadata.py +63 -0
- tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
- tensorbored/plugins/histogram/summary.py +234 -0
- tensorbored/plugins/histogram/summary_v2.py +292 -0
- tensorbored/plugins/hparams/__init__.py +14 -0
- tensorbored/plugins/hparams/_keras.py +93 -0
- tensorbored/plugins/hparams/api.py +130 -0
- tensorbored/plugins/hparams/api_pb2.py +208 -0
- tensorbored/plugins/hparams/backend_context.py +606 -0
- tensorbored/plugins/hparams/download_data.py +158 -0
- tensorbored/plugins/hparams/error.py +26 -0
- tensorbored/plugins/hparams/get_experiment.py +71 -0
- tensorbored/plugins/hparams/hparams_plugin.py +206 -0
- tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
- tensorbored/plugins/hparams/json_format_compat.py +38 -0
- tensorbored/plugins/hparams/list_metric_evals.py +57 -0
- tensorbored/plugins/hparams/list_session_groups.py +1040 -0
- tensorbored/plugins/hparams/metadata.py +125 -0
- tensorbored/plugins/hparams/metrics.py +41 -0
- tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
- tensorbored/plugins/hparams/summary.py +205 -0
- tensorbored/plugins/hparams/summary_v2.py +597 -0
- tensorbored/plugins/image/__init__.py +0 -0
- tensorbored/plugins/image/images_plugin.py +232 -0
- tensorbored/plugins/image/metadata.py +65 -0
- tensorbored/plugins/image/plugin_data_pb2.py +34 -0
- tensorbored/plugins/image/summary.py +159 -0
- tensorbored/plugins/image/summary_v2.py +130 -0
- tensorbored/plugins/mesh/__init__.py +14 -0
- tensorbored/plugins/mesh/mesh_plugin.py +292 -0
- tensorbored/plugins/mesh/metadata.py +152 -0
- tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
- tensorbored/plugins/mesh/summary.py +251 -0
- tensorbored/plugins/mesh/summary_v2.py +214 -0
- tensorbored/plugins/metrics/__init__.py +0 -0
- tensorbored/plugins/metrics/metadata.py +17 -0
- tensorbored/plugins/metrics/metrics_plugin.py +623 -0
- tensorbored/plugins/pr_curve/__init__.py +0 -0
- tensorbored/plugins/pr_curve/metadata.py +75 -0
- tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
- tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
- tensorbored/plugins/pr_curve/summary.py +574 -0
- tensorbored/plugins/profile_redirect/__init__.py +0 -0
- tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
- tensorbored/plugins/projector/__init__.py +67 -0
- tensorbored/plugins/projector/metadata.py +26 -0
- tensorbored/plugins/projector/projector_config_pb2.py +54 -0
- tensorbored/plugins/projector/projector_plugin.py +795 -0
- tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
- tensorbored/plugins/scalar/__init__.py +0 -0
- tensorbored/plugins/scalar/metadata.py +60 -0
- tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
- tensorbored/plugins/scalar/scalars_plugin.py +181 -0
- tensorbored/plugins/scalar/summary.py +109 -0
- tensorbored/plugins/scalar/summary_v2.py +124 -0
- tensorbored/plugins/text/__init__.py +0 -0
- tensorbored/plugins/text/metadata.py +62 -0
- tensorbored/plugins/text/plugin_data_pb2.py +34 -0
- tensorbored/plugins/text/summary.py +114 -0
- tensorbored/plugins/text/summary_v2.py +124 -0
- tensorbored/plugins/text/text_plugin.py +288 -0
- tensorbored/plugins/wit_redirect/__init__.py +0 -0
- tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
- tensorbored/program.py +910 -0
- tensorbored/summary/__init__.py +35 -0
- tensorbored/summary/_output.py +124 -0
- tensorbored/summary/_tf/__init__.py +14 -0
- tensorbored/summary/_tf/summary/__init__.py +178 -0
- tensorbored/summary/_writer.py +105 -0
- tensorbored/summary/v1.py +51 -0
- tensorbored/summary/v2.py +25 -0
- tensorbored/summary/writer/__init__.py +13 -0
- tensorbored/summary/writer/event_file_writer.py +291 -0
- tensorbored/summary/writer/record_writer.py +50 -0
- tensorbored/util/__init__.py +0 -0
- tensorbored/util/encoder.py +116 -0
- tensorbored/util/grpc_util.py +311 -0
- tensorbored/util/img_mime_type_detector.py +40 -0
- tensorbored/util/io_util.py +20 -0
- tensorbored/util/lazy_tensor_creator.py +110 -0
- tensorbored/util/op_evaluator.py +104 -0
- tensorbored/util/platform_util.py +20 -0
- tensorbored/util/tb_logging.py +24 -0
- tensorbored/util/tensor_util.py +617 -0
- tensorbored/util/timing.py +122 -0
- tensorbored/version.py +21 -0
- tensorbored/webfiles.zip +0 -0
- tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
- tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
- tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
- tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
- tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
- tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
tensorbored/plugins/debugger_v2/debugger_v2_plugin.py
@@ -0,0 +1,504 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The TensorBoard Debugger V2 plugin."""

import threading

from werkzeug import wrappers

from tensorbored import errors
from tensorbored import plugin_util
from tensorbored.plugins import base_plugin
from tensorbored.plugins.debugger_v2 import debug_data_provider
from tensorbored.backend import http_util


def _error_response(request, error_message):
    return http_util.Respond(
        request,
        {"error": error_message},
        "application/json",
        code=400,
    )


def _missing_run_error_response(request):
    return _error_response(request, "run parameter is not provided")


class DebuggerV2Plugin(base_plugin.TBPlugin):
    """Debugger V2 Plugin for TensorBoard."""

    plugin_name = debug_data_provider.PLUGIN_NAME

    def __init__(self, context):
        """Instantiates Debugger V2 Plugin via TensorBoard core.

        Args:
          context: A base_plugin.TBContext instance.
        """
        super().__init__(context)
        self._logdir = context.logdir
        self._underlying_data_provider = None
        # Held while initializing `_underlying_data_provider` for the first
        # time, to make sure that we only construct one.
        self._data_provider_init_lock = threading.Lock()

    @property
    def _data_provider(self):
        if self._underlying_data_provider is not None:
            return self._underlying_data_provider
        with self._data_provider_init_lock:
            if self._underlying_data_provider is not None:
                return self._underlying_data_provider
            # TODO(cais): Implement factory for DataProvider that takes into
            # account the settings.
            dp = debug_data_provider.LocalDebuggerV2DataProvider(self._logdir)
            self._underlying_data_provider = dp
            return dp

    def get_plugin_apps(self):
        # TODO(cais): Add routes as they are implemented.
        return {
            "/runs": self.serve_runs,
            "/alerts": self.serve_alerts,
            "/execution/digests": self.serve_execution_digests,
            "/execution/data": self.serve_execution_data,
            "/graph_execution/digests": self.serve_graph_execution_digests,
            "/graph_execution/data": self.serve_graph_execution_data,
            "/graphs/graph_info": self.serve_graph_info,
            "/graphs/op_info": self.serve_graph_op_info,
            "/source_files/list": self.serve_source_files_list,
            "/source_files/file": self.serve_source_file,
            "/stack_frames/stack_frames": self.serve_stack_frames,
        }

    def is_active(self):
        """The Debugger V2 plugin must be manually selected."""
        return False

    def frontend_metadata(self):
        return base_plugin.FrontendMetadata(
            is_ng_component=True, tab_name="Debugger V2", disable_reload=False
        )

    @wrappers.Request.application
    def serve_runs(self, request):
        experiment = plugin_util.experiment_id(request.environ)
        runs = self._data_provider.list_runs(experiment_id=experiment)
        run_listing = dict()
        for run in runs:
            run_listing[run.run_id] = {"start_time": run.start_time}
        return http_util.Respond(request, run_listing, "application/json")

    @wrappers.Request.application
    def serve_alerts(self, request):
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin = int(request.args.get("begin", "0"))
        end = int(request.args.get("end", "-1"))
        alert_type = request.args.get("alert_type", None)
        run_tag_filter = debug_data_provider.alerts_run_tag_filter(
            run, begin, end, alert_type=alert_type
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.InvalidArgumentError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_execution_digests(self, request):
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin = int(request.args.get("begin", "0"))
        end = int(request.args.get("end", "-1"))
        run_tag_filter = debug_data_provider.execution_digest_run_tag_filter(
            run, begin, end
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.InvalidArgumentError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_execution_data(self, request):
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin = int(request.args.get("begin", "0"))
        end = int(request.args.get("end", "-1"))
        run_tag_filter = debug_data_provider.execution_data_run_tag_filter(
            run, begin, end
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.InvalidArgumentError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_graph_execution_digests(self, request):
        """Serve digests of intra-graph execution events.

        As the names imply, this route differs from `serve_execution_digests()`
        in that it is for intra-graph execution, while `serve_execution_digests()`
        is for top-level (eager) execution.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin = int(request.args.get("begin", "0"))
        end = int(request.args.get("end", "-1"))
        run_tag_filter = (
            debug_data_provider.graph_execution_digest_run_tag_filter(
                run, begin, end
            )
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.InvalidArgumentError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_graph_execution_data(self, request):
        """Serve detailed data objects of intra-graph execution events.

        As the names imply, this route differs from `serve_execution_data()`
        in that it is for intra-graph execution, while `serve_execution_data()`
        is for top-level (eager) execution.

        Unlike `serve_graph_execution_digests()`, this method serves the
        full-sized data objects for intra-graph execution events.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        begin = int(request.args.get("begin", "0"))
        end = int(request.args.get("end", "-1"))
        run_tag_filter = (
            debug_data_provider.graph_execution_data_run_tag_filter(
                run, begin, end
            )
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.InvalidArgumentError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_graph_info(self, request):
        """Serve basic information about a TensorFlow graph.

        The request specifies the debugger-generated ID of the graph being
        queried.

        The response contains a JSON object with the following fields:
        - graph_id: The debugger-generated ID (echoing the request).
        - name: The name of the graph (if any). For TensorFlow 2.x
          Function Graphs (FuncGraphs), this is typically the name of
          the underlying Python function, optionally prefixed with
          TensorFlow-generated prefixes such as "__inference_".
          Some graphs (e.g., certain outermost graphs) may have no names,
          in which case this field is `null`.
        - outer_graph_id: Outer graph ID (if any). For an outermost graph
          without an outer graph context, this field is `null`.
        - inner_graph_ids: Debugger-generated IDs of all the graphs
          nested inside this graph. For a graph without any graphs nested
          inside, this field is an empty array.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        graph_id = request.args.get("graph_id")
        run_tag_filter = debug_data_provider.graph_info_run_tag_filter(
            run, graph_id
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.NotFoundError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_graph_op_info(self, request):
        """Serve information for ops in graphs.

        The request specifies the op name and the ID of the graph that
        contains the op.

        The response contains a JSON object with the following fields:
        - op_type
        - op_name
        - graph_ids: Stack of graph IDs that the op is located in, from
          outermost to innermost. The length of this array is always >= 1.
          The length is 1 if and only if the graph is an outermost graph.
        - num_outputs: Number of output tensors.
        - output_tensor_ids: The debugger-generated numeric IDs for the
          symbolic output tensors of the op (an array of numbers).
        - host_name: Name of the host on which the op is created.
        - stack_trace: Stack frames of the op's creation.
        - inputs: Specifications of all inputs to this op.
          Currently only immediate (one level of) inputs are provided.
          This is an array of length N_in, where N_in is the number of
          data inputs received by the op. Each element of the array is an
          object with the following fields:
          - op_name: Name of the op that provides the input tensor.
          - output_slot: 0-based output slot index from which the input
            tensor emits.
          - data: A recursive data structure of this same schema.
            This field is not populated (undefined) at the leaf nodes
            of this recursive data structure.
            In the rare case wherein the data for an input cannot be
            retrieved properly (e.g., special internal op types), this
            field will be unpopulated.
          This is an empty list for an op with no inputs.
        - consumers: Specifications for all the downstream consuming ops of
          this op. Currently only immediate (one level of) consumers are
          provided.
          This is an array of length N_out, where N_out is the number of
          symbolic tensors output by this op.
          Each element of the array is an array whose length equals the
          number of downstream ops that consume the corresponding symbolic
          tensor (only data edges are tracked).
          Each element of the array is an object with the following fields:
          - op_name: Name of the op that receives the output tensor as an
            input.
          - input_slot: 0-based input slot index at which the downstream
            op receives this output tensor.
          - data: A recursive data structure of this very schema.
            This field is not populated (undefined) at the leaf nodes
            of this recursive data structure.
            In the rare case wherein the data for a consumer op cannot be
            retrieved properly (e.g., special internal op types), this
            field will be unpopulated.
          If this op has no output tensors, this is an empty array.
          If one of the output tensors of this op has no consumers, the
          corresponding element is an empty array.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        graph_id = request.args.get("graph_id")
        op_name = request.args.get("op_name")
        run_tag_filter = debug_data_provider.graph_op_info_run_tag_filter(
            run, graph_id, op_name
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.NotFoundError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_source_files_list(self, request):
        """Serves a list of all source files involved in the debugged program."""
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        run_tag_filter = debug_data_provider.source_file_list_run_tag_filter(
            run
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        return http_util.Respond(
            request,
            self._data_provider.read_blob(
                blob_key=blob_sequences[run][tag][0].blob_key
            ),
            "application/json",
        )

    @wrappers.Request.application
    def serve_source_file(self, request):
        """Serves the content of a given source file.

        The source file is referred to by the index in the list of all source
        files involved in the execution of the debugged program, which is
        available via the `serve_source_files_list()` serving route.

        Args:
          request: HTTP request.

        Returns:
          Response to the request.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        index = request.args.get("index")
        # TODO(cais): When the need arises, support serving a subset of a
        # source file's lines.
        if index is None:
            return _error_response(
                request, "index is not provided for source file content"
            )
        index = int(index)
        run_tag_filter = debug_data_provider.source_file_run_tag_filter(
            run, index
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.NotFoundError as e:
            return _error_response(request, str(e))

    @wrappers.Request.application
    def serve_stack_frames(self, request):
        """Serves the content of stack frames.

        The stack frames being requested are referred to by their UUIDs,
        separated by commas.

        Args:
          request: HTTP request.

        Returns:
          Response to the request.
        """
        experiment = plugin_util.experiment_id(request.environ)
        run = request.args.get("run")
        if run is None:
            return _missing_run_error_response(request)
        stack_frame_ids = request.args.get("stack_frame_ids")
        if stack_frame_ids is None:
            return _error_response(request, "Missing stack_frame_ids parameter")
        if not stack_frame_ids:
            return _error_response(request, "Empty stack_frame_ids parameter")
        stack_frame_ids = stack_frame_ids.split(",")
        run_tag_filter = debug_data_provider.stack_frames_run_tag_filter(
            run, stack_frame_ids
        )
        blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=self.plugin_name,
            run_tag_filter=run_tag_filter,
        )
        tag = next(iter(run_tag_filter.tags))
        try:
            return http_util.Respond(
                request,
                self._data_provider.read_blob(
                    blob_key=blob_sequences[run][tag][0].blob_key
                ),
                "application/json",
            )
        except errors.NotFoundError as e:
            return _error_response(request, str(e))
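Aside from `serve_runs`, every handler in the file above repeats the same read path: validate the `run` query parameter, build a run-tag filter through a `debug_data_provider` helper, fetch the matching blob sequences, and serve the first blob of the single filtered tag as JSON. A minimal sketch of that shared skeleton follows; `_serve_single_blob` and `filter_fn` are illustrative names for this note, not part of the package:

def _serve_single_blob(self, request, filter_fn, *args, **kwargs):
    # Hypothetical helper: the pattern shared by the serve_* handlers above.
    # filter_fn would be a factory such as
    # debug_data_provider.alerts_run_tag_filter.
    experiment = plugin_util.experiment_id(request.environ)
    run = request.args.get("run")
    if run is None:
        return _missing_run_error_response(request)
    run_tag_filter = filter_fn(run, *args, **kwargs)
    blob_sequences = self._data_provider.read_blob_sequences(
        experiment_id=experiment,
        plugin_name=self.plugin_name,
        run_tag_filter=run_tag_filter,
    )
    # Each filter targets exactly one tag, so the first (only) tag names
    # the blob sequence to read.
    tag = next(iter(run_tag_filter.tags))
    return http_util.Respond(
        request,
        self._data_provider.read_blob(
            blob_key=blob_sequences[run][tag][0].blob_key
        ),
        "application/json",
    )

Each route then differs only in its filter factory, its extra query parameters, and which error class it maps to an HTTP 400 response.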
tensorbored/plugins/distribution/compressor.py
@@ -0,0 +1,158 @@
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Package for histogram compression."""

import dataclasses
import numpy as np

from typing import Tuple

# Normal CDF for std_devs: (-Inf, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, Inf)
# naturally gives bands around median of width 1 std dev, 2 std dev, 3 std dev,
# and then the long tail.
NORMAL_HISTOGRAM_BPS = (0, 668, 1587, 3085, 5000, 6915, 8413, 9332, 10000)


@dataclasses.dataclass(frozen=True)
class CompressedHistogramValue:
    """Represents a value in a compressed histogram.

    Attributes:
      basis_point: Compression point represented in basis points, 1/100ths of
        a percent.
      value: Cumulative weight at the basis point.
    """

    basis_point: float
    value: float

    def as_tuple(self) -> Tuple[float, float]:
        """Returns the basis point and the value as a tuple."""
        return (self.basis_point, self.value)


# TODO(@jart): Unfork these methods.
def compress_histogram_proto(histo, bps=NORMAL_HISTOGRAM_BPS):
    """Creates fixed size histogram by adding compression to accumulated state.

    This routine transforms a histogram at a particular step by interpolating
    its variable number of buckets to represent their cumulative weight at a
    constant number of compression points. This significantly reduces the size
    of the histogram and makes it suitable for a two-dimensional area plot
    where the output of this routine constitutes the ranges for a single x
    coordinate.

    Args:
      histo: A HistogramProto object.
      bps: Compression points represented in basis points, 1/100ths of a
        percent. Defaults to normal distribution.

    Returns:
      List of values for each basis point.
    """
    # See also: Histogram::Percentile() in core/lib/histogram/histogram.cc
    if not histo.num:
        return [CompressedHistogramValue(b, 0.0).as_tuple() for b in bps]
    bucket = np.array(histo.bucket)
    bucket_limit = list(histo.bucket_limit)
    weights = (bucket * bps[-1] / (bucket.sum() or 1.0)).cumsum()
    values = []
    j = 0
    while j < len(bps):
        i = np.searchsorted(weights, bps[j], side="right")
        while i < len(weights):
            cumsum = weights[i]
            cumsum_prev = weights[i - 1] if i > 0 else 0.0
            if cumsum == cumsum_prev:  # prevent lerp divide by zero
                i += 1
                continue
            if not i or not cumsum_prev:
                lhs = histo.min
            else:
                lhs = max(bucket_limit[i - 1], histo.min)
            rhs = min(bucket_limit[i], histo.max)
            weight = _lerp(bps[j], cumsum_prev, cumsum, lhs, rhs)
            values.append(CompressedHistogramValue(bps[j], weight).as_tuple())
            j += 1
            break
        else:
            break
    while j < len(bps):
        values.append(CompressedHistogramValue(bps[j], histo.max).as_tuple())
        j += 1
    return values


def compress_histogram(buckets, bps=NORMAL_HISTOGRAM_BPS):
    """Creates fixed size histogram by adding compression to accumulated state.

    This routine transforms a histogram at a particular step by linearly
    interpolating its variable number of buckets to represent their cumulative
    weight at a constant number of compression points. This significantly
    reduces the size of the histogram and makes it suitable for a
    two-dimensional area plot where the output of this routine constitutes the
    ranges for a single x coordinate.

    Args:
      buckets: A list of buckets, each of which is a 3-tuple of the form
        `(min, max, count)`.
      bps: Compression points represented in basis points, 1/100ths of a
        percent. Defaults to normal distribution.

    Returns:
      List of values for each basis point.
    """
    # See also: Histogram::Percentile() in core/lib/histogram/histogram.cc
    buckets = np.array(buckets)
    if not buckets.size:
        return [CompressedHistogramValue(b, 0.0).as_tuple() for b in bps]
    minmin, maxmax = (buckets[0][0], buckets[-1][1])
    counts = buckets[:, 2]
    right_edges = list(buckets[:, 1])
    weights = (counts * bps[-1] / (counts.sum() or 1.0)).cumsum()

    result = []
    bp_index = 0
    while bp_index < len(bps):
        i = np.searchsorted(weights, bps[bp_index], side="right")
        while i < len(weights):
            cumsum = weights[i]
            cumsum_prev = weights[i - 1] if i > 0 else 0.0
            if cumsum == cumsum_prev:  # prevent division-by-zero in `_lerp`
                i += 1
                continue
            if not i or not cumsum_prev:
                lhs = minmin
            else:
                lhs = max(right_edges[i - 1], minmin)
            rhs = min(right_edges[i], maxmax)
            weight = _lerp(bps[bp_index], cumsum_prev, cumsum, lhs, rhs)
            result.append(
                CompressedHistogramValue(bps[bp_index], weight).as_tuple()
            )
            bp_index += 1
            break
        else:
            break
    while bp_index < len(bps):
        result.append(
            CompressedHistogramValue(bps[bp_index], maxmax).as_tuple()
        )
        bp_index += 1
    return result


def _lerp(x, x0, x1, y0, y1):
    """Affinely map from [x0, x1] onto [y0, y1]."""
    return y0 + (x - x0) * float(y1 - y0) / (x1 - x0)