tensorbored-2.21.0rc1769983804-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorbored/__init__.py +112 -0
- tensorbored/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/__init__.py +125 -0
- tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
- tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
- tensorbored/_vendor/bleach/callbacks.py +32 -0
- tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
- tensorbored/_vendor/bleach/linkifier.py +633 -0
- tensorbored/_vendor/bleach/parse_shim.py +1 -0
- tensorbored/_vendor/bleach/sanitizer.py +638 -0
- tensorbored/_vendor/bleach/six_shim.py +19 -0
- tensorbored/_vendor/webencodings/__init__.py +342 -0
- tensorbored/_vendor/webencodings/labels.py +231 -0
- tensorbored/_vendor/webencodings/mklabels.py +59 -0
- tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
- tensorbored/assets.py +36 -0
- tensorbored/auth.py +102 -0
- tensorbored/backend/__init__.py +0 -0
- tensorbored/backend/application.py +604 -0
- tensorbored/backend/auth_context_middleware.py +38 -0
- tensorbored/backend/client_feature_flags.py +113 -0
- tensorbored/backend/empty_path_redirect.py +46 -0
- tensorbored/backend/event_processing/__init__.py +0 -0
- tensorbored/backend/event_processing/data_ingester.py +276 -0
- tensorbored/backend/event_processing/data_provider.py +535 -0
- tensorbored/backend/event_processing/directory_loader.py +142 -0
- tensorbored/backend/event_processing/directory_watcher.py +272 -0
- tensorbored/backend/event_processing/event_accumulator.py +950 -0
- tensorbored/backend/event_processing/event_file_inspector.py +463 -0
- tensorbored/backend/event_processing/event_file_loader.py +292 -0
- tensorbored/backend/event_processing/event_multiplexer.py +521 -0
- tensorbored/backend/event_processing/event_util.py +68 -0
- tensorbored/backend/event_processing/io_wrapper.py +223 -0
- tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
- tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
- tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
- tensorbored/backend/event_processing/reservoir.py +266 -0
- tensorbored/backend/event_processing/tag_types.py +29 -0
- tensorbored/backend/experiment_id.py +71 -0
- tensorbored/backend/experimental_plugin.py +51 -0
- tensorbored/backend/http_util.py +263 -0
- tensorbored/backend/json_util.py +70 -0
- tensorbored/backend/path_prefix.py +67 -0
- tensorbored/backend/process_graph.py +74 -0
- tensorbored/backend/security_validator.py +202 -0
- tensorbored/compat/__init__.py +69 -0
- tensorbored/compat/proto/__init__.py +0 -0
- tensorbored/compat/proto/allocation_description_pb2.py +35 -0
- tensorbored/compat/proto/api_def_pb2.py +82 -0
- tensorbored/compat/proto/attr_value_pb2.py +80 -0
- tensorbored/compat/proto/cluster_pb2.py +58 -0
- tensorbored/compat/proto/config_pb2.py +271 -0
- tensorbored/compat/proto/coordination_config_pb2.py +45 -0
- tensorbored/compat/proto/cost_graph_pb2.py +87 -0
- tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
- tensorbored/compat/proto/debug_pb2.py +65 -0
- tensorbored/compat/proto/event_pb2.py +149 -0
- tensorbored/compat/proto/full_type_pb2.py +74 -0
- tensorbored/compat/proto/function_pb2.py +157 -0
- tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
- tensorbored/compat/proto/graph_pb2.py +41 -0
- tensorbored/compat/proto/histogram_pb2.py +39 -0
- tensorbored/compat/proto/meta_graph_pb2.py +254 -0
- tensorbored/compat/proto/node_def_pb2.py +61 -0
- tensorbored/compat/proto/op_def_pb2.py +81 -0
- tensorbored/compat/proto/resource_handle_pb2.py +48 -0
- tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
- tensorbored/compat/proto/rpc_options_pb2.py +35 -0
- tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
- tensorbored/compat/proto/saver_pb2.py +38 -0
- tensorbored/compat/proto/step_stats_pb2.py +116 -0
- tensorbored/compat/proto/struct_pb2.py +144 -0
- tensorbored/compat/proto/summary_pb2.py +111 -0
- tensorbored/compat/proto/tensor_description_pb2.py +38 -0
- tensorbored/compat/proto/tensor_pb2.py +68 -0
- tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
- tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
- tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
- tensorbored/compat/proto/types_pb2.py +105 -0
- tensorbored/compat/proto/variable_pb2.py +62 -0
- tensorbored/compat/proto/verifier_config_pb2.py +38 -0
- tensorbored/compat/proto/versions_pb2.py +35 -0
- tensorbored/compat/tensorflow_stub/__init__.py +38 -0
- tensorbored/compat/tensorflow_stub/app.py +124 -0
- tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
- tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
- tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
- tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
- tensorbored/compat/tensorflow_stub/errors.py +507 -0
- tensorbored/compat/tensorflow_stub/flags.py +124 -0
- tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
- tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
- tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
- tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
- tensorbored/context.py +129 -0
- tensorbored/data/__init__.py +0 -0
- tensorbored/data/grpc_provider.py +365 -0
- tensorbored/data/ingester.py +46 -0
- tensorbored/data/proto/__init__.py +0 -0
- tensorbored/data/proto/data_provider_pb2.py +517 -0
- tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
- tensorbored/data/provider.py +1365 -0
- tensorbored/data/server_ingester.py +301 -0
- tensorbored/data_compat.py +159 -0
- tensorbored/dataclass_compat.py +224 -0
- tensorbored/default.py +124 -0
- tensorbored/errors.py +130 -0
- tensorbored/lazy.py +99 -0
- tensorbored/main.py +48 -0
- tensorbored/main_lib.py +62 -0
- tensorbored/manager.py +487 -0
- tensorbored/notebook.py +441 -0
- tensorbored/plugin_util.py +266 -0
- tensorbored/plugins/__init__.py +0 -0
- tensorbored/plugins/audio/__init__.py +0 -0
- tensorbored/plugins/audio/audio_plugin.py +229 -0
- tensorbored/plugins/audio/metadata.py +69 -0
- tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
- tensorbored/plugins/audio/summary.py +230 -0
- tensorbored/plugins/audio/summary_v2.py +124 -0
- tensorbored/plugins/base_plugin.py +367 -0
- tensorbored/plugins/core/__init__.py +0 -0
- tensorbored/plugins/core/core_plugin.py +981 -0
- tensorbored/plugins/custom_scalar/__init__.py +0 -0
- tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
- tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
- tensorbored/plugins/custom_scalar/metadata.py +35 -0
- tensorbored/plugins/custom_scalar/summary.py +79 -0
- tensorbored/plugins/debugger_v2/__init__.py +0 -0
- tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
- tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
- tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
- tensorbored/plugins/distribution/__init__.py +0 -0
- tensorbored/plugins/distribution/compressor.py +158 -0
- tensorbored/plugins/distribution/distributions_plugin.py +116 -0
- tensorbored/plugins/distribution/metadata.py +19 -0
- tensorbored/plugins/graph/__init__.py +0 -0
- tensorbored/plugins/graph/graph_util.py +129 -0
- tensorbored/plugins/graph/graphs_plugin.py +336 -0
- tensorbored/plugins/graph/keras_util.py +328 -0
- tensorbored/plugins/graph/metadata.py +42 -0
- tensorbored/plugins/histogram/__init__.py +0 -0
- tensorbored/plugins/histogram/histograms_plugin.py +144 -0
- tensorbored/plugins/histogram/metadata.py +63 -0
- tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
- tensorbored/plugins/histogram/summary.py +234 -0
- tensorbored/plugins/histogram/summary_v2.py +292 -0
- tensorbored/plugins/hparams/__init__.py +14 -0
- tensorbored/plugins/hparams/_keras.py +93 -0
- tensorbored/plugins/hparams/api.py +130 -0
- tensorbored/plugins/hparams/api_pb2.py +208 -0
- tensorbored/plugins/hparams/backend_context.py +606 -0
- tensorbored/plugins/hparams/download_data.py +158 -0
- tensorbored/plugins/hparams/error.py +26 -0
- tensorbored/plugins/hparams/get_experiment.py +71 -0
- tensorbored/plugins/hparams/hparams_plugin.py +206 -0
- tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
- tensorbored/plugins/hparams/json_format_compat.py +38 -0
- tensorbored/plugins/hparams/list_metric_evals.py +57 -0
- tensorbored/plugins/hparams/list_session_groups.py +1040 -0
- tensorbored/plugins/hparams/metadata.py +125 -0
- tensorbored/plugins/hparams/metrics.py +41 -0
- tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
- tensorbored/plugins/hparams/summary.py +205 -0
- tensorbored/plugins/hparams/summary_v2.py +597 -0
- tensorbored/plugins/image/__init__.py +0 -0
- tensorbored/plugins/image/images_plugin.py +232 -0
- tensorbored/plugins/image/metadata.py +65 -0
- tensorbored/plugins/image/plugin_data_pb2.py +34 -0
- tensorbored/plugins/image/summary.py +159 -0
- tensorbored/plugins/image/summary_v2.py +130 -0
- tensorbored/plugins/mesh/__init__.py +14 -0
- tensorbored/plugins/mesh/mesh_plugin.py +292 -0
- tensorbored/plugins/mesh/metadata.py +152 -0
- tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
- tensorbored/plugins/mesh/summary.py +251 -0
- tensorbored/plugins/mesh/summary_v2.py +214 -0
- tensorbored/plugins/metrics/__init__.py +0 -0
- tensorbored/plugins/metrics/metadata.py +17 -0
- tensorbored/plugins/metrics/metrics_plugin.py +623 -0
- tensorbored/plugins/pr_curve/__init__.py +0 -0
- tensorbored/plugins/pr_curve/metadata.py +75 -0
- tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
- tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
- tensorbored/plugins/pr_curve/summary.py +574 -0
- tensorbored/plugins/profile_redirect/__init__.py +0 -0
- tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
- tensorbored/plugins/projector/__init__.py +67 -0
- tensorbored/plugins/projector/metadata.py +26 -0
- tensorbored/plugins/projector/projector_config_pb2.py +54 -0
- tensorbored/plugins/projector/projector_plugin.py +795 -0
- tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
- tensorbored/plugins/scalar/__init__.py +0 -0
- tensorbored/plugins/scalar/metadata.py +60 -0
- tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
- tensorbored/plugins/scalar/scalars_plugin.py +181 -0
- tensorbored/plugins/scalar/summary.py +109 -0
- tensorbored/plugins/scalar/summary_v2.py +124 -0
- tensorbored/plugins/text/__init__.py +0 -0
- tensorbored/plugins/text/metadata.py +62 -0
- tensorbored/plugins/text/plugin_data_pb2.py +34 -0
- tensorbored/plugins/text/summary.py +114 -0
- tensorbored/plugins/text/summary_v2.py +124 -0
- tensorbored/plugins/text/text_plugin.py +288 -0
- tensorbored/plugins/wit_redirect/__init__.py +0 -0
- tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
- tensorbored/program.py +910 -0
- tensorbored/summary/__init__.py +35 -0
- tensorbored/summary/_output.py +124 -0
- tensorbored/summary/_tf/__init__.py +14 -0
- tensorbored/summary/_tf/summary/__init__.py +178 -0
- tensorbored/summary/_writer.py +105 -0
- tensorbored/summary/v1.py +51 -0
- tensorbored/summary/v2.py +25 -0
- tensorbored/summary/writer/__init__.py +13 -0
- tensorbored/summary/writer/event_file_writer.py +291 -0
- tensorbored/summary/writer/record_writer.py +50 -0
- tensorbored/util/__init__.py +0 -0
- tensorbored/util/encoder.py +116 -0
- tensorbored/util/grpc_util.py +311 -0
- tensorbored/util/img_mime_type_detector.py +40 -0
- tensorbored/util/io_util.py +20 -0
- tensorbored/util/lazy_tensor_creator.py +110 -0
- tensorbored/util/op_evaluator.py +104 -0
- tensorbored/util/platform_util.py +20 -0
- tensorbored/util/tb_logging.py +24 -0
- tensorbored/util/tensor_util.py +617 -0
- tensorbored/util/timing.py +122 -0
- tensorbored/version.py +21 -0
- tensorbored/webfiles.zip +0 -0
- tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
- tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
- tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
- tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
- tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
- tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
tensorbored/context.py
ADDED
@@ -0,0 +1,129 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Request-scoped context."""
+
+from tensorbored import auth as auth_lib
+
+# A `RequestContext` value is stored on WSGI environments under this key.
+_WSGI_KEY = "tensorbored.request_context"
+
+
+class RequestContext:
+    """Container of request-scoped values.
+
+    This context is for cross-cutting concerns: authentication,
+    authorization, auditing, internationalization, logging, and so on.
+    It is not simply for passing commonly used parameters to functions.
+
+    `RequestContext` values are to be treated as immutable.
+
+    Fields:
+      auth: An `AuthContext`, which may be empty but is never `None`.
+      remote_ip: An `ipaddress.IPv4Address` or `ipaddress.IPv6Address` or None.
+        Best guess of the IP Address of the end user.
+      x_forwarded_for: A tuple of `ipaddress.IPv4Address` or `ipaddress.IPv6Address`,
+        which may be empty but is never None. This should be parsed value of X-Forwarded-For
+        HTTP header from the request.
+      client_feature_flags: A dict of string to arbitrary type. These represent
+        feature flag key/value pairs sent by the client application. Usage of
+        client_feature_flags should know the name of the feature flag key and
+        should know and validate the type of the value.
+    """
+
+    def __init__(
+        self,
+        auth=None,
+        remote_ip=None,
+        x_forwarded_for=None,
+        client_feature_flags=None,
+    ):
+        """Create a request context.
+
+        The argument list is sorted and may be extended in the future;
+        therefore, callers must pass only named arguments to this
+        initializer.
+
+        Args:
+          See "Fields" on class docstring. All arguments are optional
+          and will be replaced with default values if appropriate.
+        """
+        self._auth = auth if auth is not None else auth_lib.AuthContext.empty()
+        self._remote_ip = remote_ip
+        self._x_forwarded_for = x_forwarded_for or ()
+        self._client_feature_flags = client_feature_flags or {}
+
+    @property
+    def auth(self):
+        return self._auth
+
+    @property
+    def remote_ip(self):
+        return self._remote_ip
+
+    @property
+    def x_forwarded_for(self):
+        return self._x_forwarded_for
+
+    @property
+    def client_feature_flags(self):
+        return self._client_feature_flags
+
+    def replace(self, **kwargs):
+        """Create a copy of this context with updated key-value pairs.
+
+        Analogous to `namedtuple._replace`. For example, to create a new
+        request context like `ctx` but with auth context `auth`, call
+        `ctx.replace(auth=auth)`.
+
+        Args:
+          As to `__init__`.
+
+        Returns:
+          A new context like this one but with the specified updates.
+        """
+        kwargs.setdefault("auth", self.auth)
+        kwargs.setdefault("remote_ip", self.remote_ip)
+        kwargs.setdefault("x_forwarded_for", self.x_forwarded_for)
+        kwargs.setdefault("client_feature_flags", self.client_feature_flags)
+        return type(self)(**kwargs)
+
+
+def from_environ(environ):
+    """Get a `RequestContext` from a WSGI environment.
+
+    See also `set_in_environ`.
+
+    Args:
+      environ: A WSGI environment (see PEP 3333).
+
+    Returns:
+      The `RequestContext` stored in the WSGI environment, or an empty
+      `RequestContext` if none is stored.
+    """
+    result = environ.get(_WSGI_KEY)
+    return result if result is not None else RequestContext()
+
+
+def set_in_environ(environ, ctx):
+    """Set the `RequestContext` in a WSGI environment.
+
+    After `set_in_environ(e, ctx)`, `from_environ(e) is ctx`. The input
+    environment is mutated.
+
+    Args:
+      environ: A WSGI environment to update.
+      ctx: A new `RequestContext` value.
+    """
+    environ[_WSGI_KEY] = ctx
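For orientation, here is a minimal sketch of how these helpers are typically wired into WSGI middleware. The middleware class and the feature-flag value are hypothetical illustrations, not code from this package:

# Illustrative sketch only: attach a RequestContext to each incoming request.
from tensorbored import context as context_lib


class ExampleRequestContextMiddleware:
    """Hypothetical WSGI middleware that stores a RequestContext in the environ."""

    def __init__(self, application):
        self._application = application

    def __call__(self, environ, start_response):
        ctx = context_lib.from_environ(environ)
        # `replace` returns a new context; RequestContext values are immutable.
        ctx = ctx.replace(client_feature_flags={"example_flag": True})
        context_lib.set_in_environ(environ, ctx)
        return self._application(environ, start_response)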
tensorbored/data/grpc_provider.py
ADDED
@@ -0,0 +1,365 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""A data provider that talks to a gRPC server."""
+
+import collections
+import contextlib
+
+import grpc
+
+from tensorbored.util import tensor_util
+from tensorbored.util import timing
+from tensorbored import errors
+from tensorbored.data import provider
+from tensorbored.data.proto import data_provider_pb2
+from tensorbored.data.proto import data_provider_pb2_grpc
+
+
+def make_stub(channel):
+    """Wraps a gRPC channel with a service stub."""
+    return data_provider_pb2_grpc.TensorBoardDataProviderStub(channel)
+
+
+class GrpcDataProvider(provider.DataProvider):
+    """Data provider that talks over gRPC."""
+
+    def __init__(self, addr, stub):
+        """Initializes a GrpcDataProvider.
+
+        Args:
+          addr: String address of the remote peer. Used cosmetically for
+            data location.
+          stub: `data_provider_pb2_grpc.TensorBoardDataProviderStub`
+            value. See `make_stub` to construct one from a channel.
+        """
+        self._addr = addr
+        self._stub = stub
+
+    def __str__(self):
+        return "GrpcDataProvider(addr=%r)" % self._addr
+
+    def experiment_metadata(self, ctx, *, experiment_id):
+        req = data_provider_pb2.GetExperimentRequest()
+        req.experiment_id = experiment_id
+        with _translate_grpc_error():
+            res = self._stub.GetExperiment(req)
+        res = provider.ExperimentMetadata(
+            data_location=res.data_location,
+            experiment_name=res.name,
+            experiment_description=res.description,
+            creation_time=_timestamp_proto_to_float(res.creation_time),
+        )
+        return res
+
+    def list_plugins(self, ctx, *, experiment_id):
+        req = data_provider_pb2.ListPluginsRequest()
+        req.experiment_id = experiment_id
+        with _translate_grpc_error():
+            res = self._stub.ListPlugins(req)
+        return [p.name for p in res.plugins]
+
+    def list_runs(self, ctx, *, experiment_id):
+        req = data_provider_pb2.ListRunsRequest()
+        req.experiment_id = experiment_id
+        with _translate_grpc_error():
+            res = self._stub.ListRuns(req)
+        return [
+            provider.Run(
+                run_id=run.name,
+                run_name=run.name,
+                start_time=run.start_time,
+            )
+            for run in res.runs
+        ]
+
+    @timing.log_latency
+    def list_scalars(
+        self, ctx, *, experiment_id, plugin_name, run_tag_filter=None
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ListScalarsRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+        with timing.log_latency("_stub.ListScalars"):
+            with _translate_grpc_error():
+                res = self._stub.ListScalars(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    time_series = tag_entry.metadata
+                    tags[tag_entry.tag_name] = provider.ScalarTimeSeries(
+                        max_step=time_series.max_step,
+                        max_wall_time=time_series.max_wall_time,
+                        plugin_content=time_series.summary_metadata.plugin_data.content,
+                        description=time_series.summary_metadata.summary_description,
+                        display_name=time_series.summary_metadata.display_name,
+                    )
+            return result
+
+    @timing.log_latency
+    def read_scalars(
+        self,
+        ctx,
+        *,
+        experiment_id,
+        plugin_name,
+        downsample=None,
+        run_tag_filter=None,
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ReadScalarsRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+            req.downsample.num_points = downsample
+        with timing.log_latency("_stub.ReadScalars"):
+            with _translate_grpc_error():
+                res = self._stub.ReadScalars(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    series = []
+                    tags[tag_entry.tag_name] = series
+                    d = tag_entry.data
+                    for step, wt, value in zip(d.step, d.wall_time, d.value):
+                        point = provider.ScalarDatum(
+                            step=step,
+                            wall_time=wt,
+                            value=value,
+                        )
+                        series.append(point)
+            return result
+
+    @timing.log_latency
+    def read_last_scalars(
+        self,
+        ctx,
+        *,
+        experiment_id,
+        plugin_name,
+        run_tag_filter=None,
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ReadScalarsRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+            # `ReadScalars` always includes the most recent datum, therefore
+            # downsampling to one means fetching the latest value.
+            req.downsample.num_points = 1
+        with timing.log_latency("_stub.ReadScalars"):
+            with _translate_grpc_error():
+                res = self._stub.ReadScalars(req)
+        with timing.log_latency("build result"):
+            result = collections.defaultdict(dict)
+            for run_entry in res.runs:
+                run_name = run_entry.run_name
+                for tag_entry in run_entry.tags:
+                    d = tag_entry.data
+                    # There should be no more than one datum in
+                    # `tag_entry.data` since downsample was set to 1.
+                    for step, wt, value in zip(d.step, d.wall_time, d.value):
+                        result[run_name][tag_entry.tag_name] = (
+                            provider.ScalarDatum(
+                                step=step,
+                                wall_time=wt,
+                                value=value,
+                            )
+                        )
+            return result
+
+    @timing.log_latency
+    def list_tensors(
+        self, ctx, *, experiment_id, plugin_name, run_tag_filter=None
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ListTensorsRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+        with timing.log_latency("_stub.ListTensors"):
+            with _translate_grpc_error():
+                res = self._stub.ListTensors(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    time_series = tag_entry.metadata
+                    tags[tag_entry.tag_name] = provider.TensorTimeSeries(
+                        max_step=time_series.max_step,
+                        max_wall_time=time_series.max_wall_time,
+                        plugin_content=time_series.summary_metadata.plugin_data.content,
+                        description=time_series.summary_metadata.summary_description,
+                        display_name=time_series.summary_metadata.display_name,
+                    )
+            return result
+
+    @timing.log_latency
+    def read_tensors(
+        self,
+        ctx,
+        *,
+        experiment_id,
+        plugin_name,
+        downsample=None,
+        run_tag_filter=None,
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ReadTensorsRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+            req.downsample.num_points = downsample
+        with timing.log_latency("_stub.ReadTensors"):
+            with _translate_grpc_error():
+                res = self._stub.ReadTensors(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    series = []
+                    tags[tag_entry.tag_name] = series
+                    d = tag_entry.data
+                    for step, wt, value in zip(d.step, d.wall_time, d.value):
+                        point = provider.TensorDatum(
+                            step=step,
+                            wall_time=wt,
+                            numpy=tensor_util.make_ndarray(value),
+                        )
+                        series.append(point)
+            return result
+
+    @timing.log_latency
+    def list_blob_sequences(
+        self, ctx, experiment_id, plugin_name, run_tag_filter=None
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ListBlobSequencesRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+        with timing.log_latency("_stub.ListBlobSequences"):
+            with _translate_grpc_error():
+                res = self._stub.ListBlobSequences(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    time_series = tag_entry.metadata
+                    tags[tag_entry.tag_name] = provider.BlobSequenceTimeSeries(
+                        max_step=time_series.max_step,
+                        max_wall_time=time_series.max_wall_time,
+                        max_length=time_series.max_length,
+                        plugin_content=time_series.summary_metadata.plugin_data.content,
+                        description=time_series.summary_metadata.summary_description,
+                        display_name=time_series.summary_metadata.display_name,
+                    )
+            return result
+
+    @timing.log_latency
+    def read_blob_sequences(
+        self,
+        ctx,
+        experiment_id,
+        plugin_name,
+        downsample=None,
+        run_tag_filter=None,
+    ):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ReadBlobSequencesRequest()
+            req.experiment_id = experiment_id
+            req.plugin_filter.plugin_name = plugin_name
+            _populate_rtf(run_tag_filter, req.run_tag_filter)
+            req.downsample.num_points = downsample
+        with timing.log_latency("_stub.ReadBlobSequences"):
+            with _translate_grpc_error():
+                res = self._stub.ReadBlobSequences(req)
+        with timing.log_latency("build result"):
+            result = {}
+            for run_entry in res.runs:
+                tags = {}
+                result[run_entry.run_name] = tags
+                for tag_entry in run_entry.tags:
+                    series = []
+                    tags[tag_entry.tag_name] = series
+                    d = tag_entry.data
+                    for step, wt, blob_sequence in zip(
+                        d.step, d.wall_time, d.values
+                    ):
+                        values = []
+                        for ref in blob_sequence.blob_refs:
+                            values.append(
+                                provider.BlobReference(
+                                    blob_key=ref.blob_key, url=ref.url or None
+                                )
+                            )
+                        point = provider.BlobSequenceDatum(
+                            step=step, wall_time=wt, values=tuple(values)
+                        )
+                        series.append(point)
+            return result
+
+    @timing.log_latency
+    def read_blob(self, ctx, blob_key):
+        with timing.log_latency("build request"):
+            req = data_provider_pb2.ReadBlobRequest()
+            req.blob_key = blob_key
+        with timing.log_latency("list(_stub.ReadBlob)"):
+            with _translate_grpc_error():
+                responses = list(self._stub.ReadBlob(req))
+        with timing.log_latency("build result"):
+            return b"".join(res.data for res in responses)
+
+
+@contextlib.contextmanager
+def _translate_grpc_error():
+    try:
+        yield
+    except grpc.RpcError as e:
+        if e.code() == grpc.StatusCode.INVALID_ARGUMENT:
+            raise errors.InvalidArgumentError(e.details())
+        if e.code() == grpc.StatusCode.NOT_FOUND:
+            raise errors.NotFoundError(e.details())
+        if e.code() == grpc.StatusCode.PERMISSION_DENIED:
+            raise errors.PermissionDeniedError(e.details())
+        raise
+
+
+def _populate_rtf(run_tag_filter, rtf_proto):
+    """Copies `run_tag_filter` into `rtf_proto`."""
+    if run_tag_filter is None:
+        return
+    if run_tag_filter.runs is not None:
+        rtf_proto.runs.names[:] = sorted(run_tag_filter.runs)
+    if run_tag_filter.tags is not None:
+        rtf_proto.tags.names[:] = sorted(run_tag_filter.tags)
+
+
+def _timestamp_proto_to_float(ts):
+    """Converts `timestamp_pb2.Timestamp` to float seconds since epoch."""
+    return ts.ToNanoseconds() / 1e9
tensorbored/data/ingester.py
ADDED
@@ -0,0 +1,46 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Abstraction for data ingestion logic."""
+
+import abc
+
+
+class DataIngester(metaclass=abc.ABCMeta):
+    """Link between a data source and a data provider.
+
+    A data ingester starts a reload operation in the background and
+    provides a data provider as a view.
+    """
+
+    @property
+    @abc.abstractmethod
+    def data_provider(self):
+        """Returns a `DataProvider`.
+
+        It may be an error to dereference this before `start` is called.
+        """
+        pass
+
+    @abc.abstractmethod
+    def start(self):
+        """Starts ingesting data.
+
+        This may start a background thread or process, and will return
+        once communication with that task is established. It won't block
+        forever as data is reloaded.
+
+        Must only be called once.
+        """
+        pass
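To make the contract concrete, a minimal sketch of a subclass follows; the class is hypothetical and simply wraps an already-built provider rather than reloading anything, unlike the real ingesters in data_ingester.py and server_ingester.py:

# Illustrative sketch only: the simplest possible DataIngester.
from tensorbored.data import ingester


class PrebuiltDataIngester(ingester.DataIngester):
    """Hypothetical ingester that exposes an already-constructed DataProvider."""

    def __init__(self, data_provider):
        self._data_provider = data_provider

    @property
    def data_provider(self):
        return self._data_provider

    def start(self):
        # Nothing to load in the background for this sketch; real ingesters
        # start their reload thread or subprocess here and then return.
        pass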