tensorbored 2.21.0rc1769983804__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tensorbored/__init__.py +112 -0
- tensorbored/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/__init__.py +125 -0
- tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
- tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
- tensorbored/_vendor/bleach/callbacks.py +32 -0
- tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
- tensorbored/_vendor/bleach/linkifier.py +633 -0
- tensorbored/_vendor/bleach/parse_shim.py +1 -0
- tensorbored/_vendor/bleach/sanitizer.py +638 -0
- tensorbored/_vendor/bleach/six_shim.py +19 -0
- tensorbored/_vendor/webencodings/__init__.py +342 -0
- tensorbored/_vendor/webencodings/labels.py +231 -0
- tensorbored/_vendor/webencodings/mklabels.py +59 -0
- tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
- tensorbored/assets.py +36 -0
- tensorbored/auth.py +102 -0
- tensorbored/backend/__init__.py +0 -0
- tensorbored/backend/application.py +604 -0
- tensorbored/backend/auth_context_middleware.py +38 -0
- tensorbored/backend/client_feature_flags.py +113 -0
- tensorbored/backend/empty_path_redirect.py +46 -0
- tensorbored/backend/event_processing/__init__.py +0 -0
- tensorbored/backend/event_processing/data_ingester.py +276 -0
- tensorbored/backend/event_processing/data_provider.py +535 -0
- tensorbored/backend/event_processing/directory_loader.py +142 -0
- tensorbored/backend/event_processing/directory_watcher.py +272 -0
- tensorbored/backend/event_processing/event_accumulator.py +950 -0
- tensorbored/backend/event_processing/event_file_inspector.py +463 -0
- tensorbored/backend/event_processing/event_file_loader.py +292 -0
- tensorbored/backend/event_processing/event_multiplexer.py +521 -0
- tensorbored/backend/event_processing/event_util.py +68 -0
- tensorbored/backend/event_processing/io_wrapper.py +223 -0
- tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
- tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
- tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
- tensorbored/backend/event_processing/reservoir.py +266 -0
- tensorbored/backend/event_processing/tag_types.py +29 -0
- tensorbored/backend/experiment_id.py +71 -0
- tensorbored/backend/experimental_plugin.py +51 -0
- tensorbored/backend/http_util.py +263 -0
- tensorbored/backend/json_util.py +70 -0
- tensorbored/backend/path_prefix.py +67 -0
- tensorbored/backend/process_graph.py +74 -0
- tensorbored/backend/security_validator.py +202 -0
- tensorbored/compat/__init__.py +69 -0
- tensorbored/compat/proto/__init__.py +0 -0
- tensorbored/compat/proto/allocation_description_pb2.py +35 -0
- tensorbored/compat/proto/api_def_pb2.py +82 -0
- tensorbored/compat/proto/attr_value_pb2.py +80 -0
- tensorbored/compat/proto/cluster_pb2.py +58 -0
- tensorbored/compat/proto/config_pb2.py +271 -0
- tensorbored/compat/proto/coordination_config_pb2.py +45 -0
- tensorbored/compat/proto/cost_graph_pb2.py +87 -0
- tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
- tensorbored/compat/proto/debug_pb2.py +65 -0
- tensorbored/compat/proto/event_pb2.py +149 -0
- tensorbored/compat/proto/full_type_pb2.py +74 -0
- tensorbored/compat/proto/function_pb2.py +157 -0
- tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
- tensorbored/compat/proto/graph_pb2.py +41 -0
- tensorbored/compat/proto/histogram_pb2.py +39 -0
- tensorbored/compat/proto/meta_graph_pb2.py +254 -0
- tensorbored/compat/proto/node_def_pb2.py +61 -0
- tensorbored/compat/proto/op_def_pb2.py +81 -0
- tensorbored/compat/proto/resource_handle_pb2.py +48 -0
- tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
- tensorbored/compat/proto/rpc_options_pb2.py +35 -0
- tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
- tensorbored/compat/proto/saver_pb2.py +38 -0
- tensorbored/compat/proto/step_stats_pb2.py +116 -0
- tensorbored/compat/proto/struct_pb2.py +144 -0
- tensorbored/compat/proto/summary_pb2.py +111 -0
- tensorbored/compat/proto/tensor_description_pb2.py +38 -0
- tensorbored/compat/proto/tensor_pb2.py +68 -0
- tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
- tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
- tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
- tensorbored/compat/proto/types_pb2.py +105 -0
- tensorbored/compat/proto/variable_pb2.py +62 -0
- tensorbored/compat/proto/verifier_config_pb2.py +38 -0
- tensorbored/compat/proto/versions_pb2.py +35 -0
- tensorbored/compat/tensorflow_stub/__init__.py +38 -0
- tensorbored/compat/tensorflow_stub/app.py +124 -0
- tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
- tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
- tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
- tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
- tensorbored/compat/tensorflow_stub/errors.py +507 -0
- tensorbored/compat/tensorflow_stub/flags.py +124 -0
- tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
- tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
- tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
- tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
- tensorbored/context.py +129 -0
- tensorbored/data/__init__.py +0 -0
- tensorbored/data/grpc_provider.py +365 -0
- tensorbored/data/ingester.py +46 -0
- tensorbored/data/proto/__init__.py +0 -0
- tensorbored/data/proto/data_provider_pb2.py +517 -0
- tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
- tensorbored/data/provider.py +1365 -0
- tensorbored/data/server_ingester.py +301 -0
- tensorbored/data_compat.py +159 -0
- tensorbored/dataclass_compat.py +224 -0
- tensorbored/default.py +124 -0
- tensorbored/errors.py +130 -0
- tensorbored/lazy.py +99 -0
- tensorbored/main.py +48 -0
- tensorbored/main_lib.py +62 -0
- tensorbored/manager.py +487 -0
- tensorbored/notebook.py +441 -0
- tensorbored/plugin_util.py +266 -0
- tensorbored/plugins/__init__.py +0 -0
- tensorbored/plugins/audio/__init__.py +0 -0
- tensorbored/plugins/audio/audio_plugin.py +229 -0
- tensorbored/plugins/audio/metadata.py +69 -0
- tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
- tensorbored/plugins/audio/summary.py +230 -0
- tensorbored/plugins/audio/summary_v2.py +124 -0
- tensorbored/plugins/base_plugin.py +367 -0
- tensorbored/plugins/core/__init__.py +0 -0
- tensorbored/plugins/core/core_plugin.py +981 -0
- tensorbored/plugins/custom_scalar/__init__.py +0 -0
- tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
- tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
- tensorbored/plugins/custom_scalar/metadata.py +35 -0
- tensorbored/plugins/custom_scalar/summary.py +79 -0
- tensorbored/plugins/debugger_v2/__init__.py +0 -0
- tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
- tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
- tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
- tensorbored/plugins/distribution/__init__.py +0 -0
- tensorbored/plugins/distribution/compressor.py +158 -0
- tensorbored/plugins/distribution/distributions_plugin.py +116 -0
- tensorbored/plugins/distribution/metadata.py +19 -0
- tensorbored/plugins/graph/__init__.py +0 -0
- tensorbored/plugins/graph/graph_util.py +129 -0
- tensorbored/plugins/graph/graphs_plugin.py +336 -0
- tensorbored/plugins/graph/keras_util.py +328 -0
- tensorbored/plugins/graph/metadata.py +42 -0
- tensorbored/plugins/histogram/__init__.py +0 -0
- tensorbored/plugins/histogram/histograms_plugin.py +144 -0
- tensorbored/plugins/histogram/metadata.py +63 -0
- tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
- tensorbored/plugins/histogram/summary.py +234 -0
- tensorbored/plugins/histogram/summary_v2.py +292 -0
- tensorbored/plugins/hparams/__init__.py +14 -0
- tensorbored/plugins/hparams/_keras.py +93 -0
- tensorbored/plugins/hparams/api.py +130 -0
- tensorbored/plugins/hparams/api_pb2.py +208 -0
- tensorbored/plugins/hparams/backend_context.py +606 -0
- tensorbored/plugins/hparams/download_data.py +158 -0
- tensorbored/plugins/hparams/error.py +26 -0
- tensorbored/plugins/hparams/get_experiment.py +71 -0
- tensorbored/plugins/hparams/hparams_plugin.py +206 -0
- tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
- tensorbored/plugins/hparams/json_format_compat.py +38 -0
- tensorbored/plugins/hparams/list_metric_evals.py +57 -0
- tensorbored/plugins/hparams/list_session_groups.py +1040 -0
- tensorbored/plugins/hparams/metadata.py +125 -0
- tensorbored/plugins/hparams/metrics.py +41 -0
- tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
- tensorbored/plugins/hparams/summary.py +205 -0
- tensorbored/plugins/hparams/summary_v2.py +597 -0
- tensorbored/plugins/image/__init__.py +0 -0
- tensorbored/plugins/image/images_plugin.py +232 -0
- tensorbored/plugins/image/metadata.py +65 -0
- tensorbored/plugins/image/plugin_data_pb2.py +34 -0
- tensorbored/plugins/image/summary.py +159 -0
- tensorbored/plugins/image/summary_v2.py +130 -0
- tensorbored/plugins/mesh/__init__.py +14 -0
- tensorbored/plugins/mesh/mesh_plugin.py +292 -0
- tensorbored/plugins/mesh/metadata.py +152 -0
- tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
- tensorbored/plugins/mesh/summary.py +251 -0
- tensorbored/plugins/mesh/summary_v2.py +214 -0
- tensorbored/plugins/metrics/__init__.py +0 -0
- tensorbored/plugins/metrics/metadata.py +17 -0
- tensorbored/plugins/metrics/metrics_plugin.py +623 -0
- tensorbored/plugins/pr_curve/__init__.py +0 -0
- tensorbored/plugins/pr_curve/metadata.py +75 -0
- tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
- tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
- tensorbored/plugins/pr_curve/summary.py +574 -0
- tensorbored/plugins/profile_redirect/__init__.py +0 -0
- tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
- tensorbored/plugins/projector/__init__.py +67 -0
- tensorbored/plugins/projector/metadata.py +26 -0
- tensorbored/plugins/projector/projector_config_pb2.py +54 -0
- tensorbored/plugins/projector/projector_plugin.py +795 -0
- tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
- tensorbored/plugins/scalar/__init__.py +0 -0
- tensorbored/plugins/scalar/metadata.py +60 -0
- tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
- tensorbored/plugins/scalar/scalars_plugin.py +181 -0
- tensorbored/plugins/scalar/summary.py +109 -0
- tensorbored/plugins/scalar/summary_v2.py +124 -0
- tensorbored/plugins/text/__init__.py +0 -0
- tensorbored/plugins/text/metadata.py +62 -0
- tensorbored/plugins/text/plugin_data_pb2.py +34 -0
- tensorbored/plugins/text/summary.py +114 -0
- tensorbored/plugins/text/summary_v2.py +124 -0
- tensorbored/plugins/text/text_plugin.py +288 -0
- tensorbored/plugins/wit_redirect/__init__.py +0 -0
- tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
- tensorbored/program.py +910 -0
- tensorbored/summary/__init__.py +35 -0
- tensorbored/summary/_output.py +124 -0
- tensorbored/summary/_tf/__init__.py +14 -0
- tensorbored/summary/_tf/summary/__init__.py +178 -0
- tensorbored/summary/_writer.py +105 -0
- tensorbored/summary/v1.py +51 -0
- tensorbored/summary/v2.py +25 -0
- tensorbored/summary/writer/__init__.py +13 -0
- tensorbored/summary/writer/event_file_writer.py +291 -0
- tensorbored/summary/writer/record_writer.py +50 -0
- tensorbored/util/__init__.py +0 -0
- tensorbored/util/encoder.py +116 -0
- tensorbored/util/grpc_util.py +311 -0
- tensorbored/util/img_mime_type_detector.py +40 -0
- tensorbored/util/io_util.py +20 -0
- tensorbored/util/lazy_tensor_creator.py +110 -0
- tensorbored/util/op_evaluator.py +104 -0
- tensorbored/util/platform_util.py +20 -0
- tensorbored/util/tb_logging.py +24 -0
- tensorbored/util/tensor_util.py +617 -0
- tensorbored/util/timing.py +122 -0
- tensorbored/version.py +21 -0
- tensorbored/webfiles.zip +0 -0
- tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
- tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
- tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
- tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
- tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
- tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ==============================================================================
|
|
15
|
+
"""Utilities for working with python gRPC stubs."""
|
|
16
|
+
|
|
17
|
+
import enum
|
|
18
|
+
import functools
|
|
19
|
+
import random
|
|
20
|
+
import threading
|
|
21
|
+
import time
|
|
22
|
+
|
|
23
|
+
import grpc
|
|
24
|
+
|
|
25
|
+
from tensorbored import version
|
|
26
|
+
from tensorbored.util import tb_logging
|
|
27
|
+
|
|
28
|
+
# Module-level logger obtained from TensorBoard's logging helper.
logger = tb_logging.get_logger()

# Default RPC timeout, in seconds, applied to each individual attempt.
_GRPC_DEFAULT_TIMEOUT_SECS = 30

# Max number of times to attempt an RPC, retrying on transient failures.
_GRPC_RETRY_MAX_ATTEMPTS = 5

# Parameters to control the exponential backoff behavior: the wait grows as
# base**attempt, scaled by a jitter factor drawn uniformly from
# [JITTER_FACTOR_MIN, JITTER_FACTOR_MAX) to spread out retry bursts.
_GRPC_RETRY_EXPONENTIAL_BASE = 2
_GRPC_RETRY_JITTER_FACTOR_MIN = 1.1
_GRPC_RETRY_JITTER_FACTOR_MAX = 1.5

# Status codes from gRPC for which it's reasonable to retry the RPC.
_GRPC_RETRYABLE_STATUS_CODES = frozenset(
    [
        grpc.StatusCode.ABORTED,
        grpc.StatusCode.DEADLINE_EXCEEDED,
        grpc.StatusCode.RESOURCE_EXHAUSTED,
        grpc.StatusCode.UNAVAILABLE,
    ]
)

# gRPC metadata key whose value contains the client version.
_VERSION_METADATA_KEY = "tensorboard-version"
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class AsyncCallFuture:
    """Future-like handle over a retried asynchronous gRPC request.

    A single logical RPC may be issued several times (the original call
    plus retries), each producing its own `grpc.Future`. This wrapper
    tracks whichever gRPC future is currently in flight and surfaces the
    final outcome -- the first success or the last failure -- to the
    caller.

    Args:
      completion_event: A `threading.Event` that the retry machinery sets
        once the whole sequence of gRPC attempts has finished.
    """

    def __init__(self, completion_event):
        # Most recently issued grpc.Future; swapped on every retry.
        self._active_grpc_future = None
        # Guards reads/writes of the active future across threads.
        self._active_grpc_future_lock = threading.Lock()
        self._completion_event = completion_event

    def _set_active_future(self, future):
        # Reject a null future early; the retry loop must always hand us a
        # real grpc.Future.
        if future is None:
            raise RuntimeError(
                "_set_active_future invoked with grpc_future=None."
            )
        with self._active_grpc_future_lock:
            self._active_grpc_future = future

    def result(self, timeout):
        """Analogous to `grpc.Future.result`. Returns the value or exception.

        Blocks until the full set of gRPC attempts has finished, then
        delegates to `grpc.Future.result` of the attempt that settled the
        request (first success or final failure).

        Args:
          timeout: How long to wait in seconds before giving up and raising.

        Returns:
          The result of the future corresponding to the single gRPC
          corresponding to the successful call.

        Raises:
          * `grpc.FutureTimeoutError` if timeout seconds elapse before the gRPC
            calls could complete, including waits and retries.
          * The exception corresponding to the last non-retryable gRPC request
            in the case that a successful gRPC request was not made.
        """
        if not self._completion_event.wait(timeout):
            raise grpc.FutureTimeoutError(
                f"AsyncCallFuture timed out after {timeout} seconds"
            )
        with self._active_grpc_future_lock:
            if self._active_grpc_future is None:
                raise RuntimeError("AsyncFuture never had an active future set")
            return self._active_grpc_future.result()
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def async_call_with_retries(api_method, request, clock=None):
    """Initiate an asynchronous gRPC call with automatic retry logic.

    The RPC is issued without blocking the caller; transient failures are
    retried (with jittered exponential backoff, to spread out failures due
    to request spikes) from the gRPC completion-callback thread. The
    returned `AsyncCallFuture` resolves to the outcome of whichever attempt
    settled the request: the first success or the final failure.

    This only supports unary-unary RPCs: i.e., no streaming on either end.

    Args:
      api_method: Callable for the API method to invoke.
      request: Request protocol buffer to pass to the API method.
      clock: an interface object supporting `time()` and `sleep()` methods
        like the standard `time` module; if not passed, uses the normal module.

    Returns:
      An `AsyncCallFuture` which will encapsulate the `grpc.Future`
      corresponding to the gRPC call which either completes successfully or
      represents the final try.
    """
    clock = time if clock is None else clock
    logger.debug("Async RPC call %s with request: %r", api_method, request)

    completion_event = threading.Event()
    async_future = AsyncCallFuture(completion_event)

    def issue_rpc(on_done):
        """Fires one gRPC attempt and registers `on_done` as its callback."""
        grpc_future = api_method.future(
            request,
            timeout=_GRPC_DEFAULT_TIMEOUT_SECS,
            metadata=version_metadata(),
        )
        # Publish the new future before wiring up the done callback, so a
        # callback that fires immediately cannot observe a stale active
        # future on `async_future`.
        async_future._set_active_future(grpc_future)
        grpc_future.add_done_callback(on_done)

    def retry_handler(future, num_attempts):
        # Continuation of each attempt. Signals completion on success, on a
        # non-retryable error, or once attempts are exhausted; otherwise
        # backs off and issues a fresh attempt with the same continuation.
        error = future.exception()
        if error is None:
            completion_event.set()
            return
        logger.info("RPC call %s got error %s", api_method, error)
        if (
            error.code() not in _GRPC_RETRYABLE_STATUS_CODES
            or num_attempts >= _GRPC_RETRY_MAX_ATTEMPTS
        ):
            completion_event.set()
            return
        clock.sleep(_compute_backoff_seconds(num_attempts))
        issue_rpc(
            functools.partial(retry_handler, num_attempts=num_attempts + 1)
        )

    issue_rpc(functools.partial(retry_handler, num_attempts=1))
    return async_future
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def _compute_backoff_seconds(num_attempts):
    """Compute appropriate wait time between RPC attempts."""
    # Exponential growth in the attempt count, scaled by a random jitter
    # factor so simultaneous clients don't retry in lockstep.
    jitter = random.uniform(
        _GRPC_RETRY_JITTER_FACTOR_MIN, _GRPC_RETRY_JITTER_FACTOR_MAX
    )
    return (_GRPC_RETRY_EXPONENTIAL_BASE**num_attempts) * jitter
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def call_with_retries(api_method, request, clock=None):
    """Call a gRPC stub API method, with automatic retry logic.

    This only supports unary-unary RPCs: i.e., no streaming on either end.
    Streamed RPCs will generally need application-level pagination support,
    because after a gRPC error one must retry the entire request; there is no
    "retry-resume" functionality.

    Retries are handled with jittered exponential backoff to spread out failures
    due to request spikes.

    Args:
      api_method: Callable for the API method to invoke.
      request: Request protocol buffer to pass to the API method.
      clock: an interface object supporting `time()` and `sleep()` methods
        like the standard `time` module; if not passed, uses the normal module.

    Returns:
      Response protocol buffer returned by the API method.

    Raises:
      grpc.RpcError: if a non-retryable error is returned, or if all retry
        attempts have been exhausted.
    """
    clock = time if clock is None else clock
    # We can't actually use api_method.__name__ because it's not a real method,
    # it's a special gRPC callable instance that doesn't expose the method name.
    rpc_name = request.__class__.__name__.replace("Request", "")
    logger.debug("RPC call %s with request: %r", rpc_name, request)
    attempt = 0
    while True:
        attempt += 1
        try:
            return api_method(
                request,
                timeout=_GRPC_DEFAULT_TIMEOUT_SECS,
                metadata=version_metadata(),
            )
        except grpc.RpcError as err:
            logger.info("RPC call %s got error %s", rpc_name, err)
            # Non-retryable status or out of attempts: propagate the error.
            if err.code() not in _GRPC_RETRYABLE_STATUS_CODES:
                raise
            if attempt >= _GRPC_RETRY_MAX_ATTEMPTS:
                raise
        # Retryable failure: wait with jittered exponential backoff.
        delay = _compute_backoff_seconds(attempt)
        logger.info(
            "RPC call %s attempted %d times, retrying in %.1f seconds",
            rpc_name,
            attempt,
            delay,
        )
        clock.sleep(delay)
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def version_metadata():
    """Creates gRPC invocation metadata encoding the TensorBoard version.

    Usage: `stub.MyRpc(request, metadata=version_metadata())`.

    Returns:
      A tuple of key-value pairs (themselves 2-tuples) to be passed as the
      `metadata` kwarg to gRPC stub API methods.
    """
    version_pair = (_VERSION_METADATA_KEY, version.VERSION)
    return (version_pair,)
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def extract_version(metadata):
    """Extracts version from invocation metadata.

    The argument should be the result of a prior call to `metadata` or the
    result of combining such a result with other metadata.

    Returns:
      The TensorBoard version listed in this metadata, or `None` if none
      is listed.
    """
    # Metadata is a sequence of (key, value) pairs; view it as a mapping to
    # look up the version key (absent -> None).
    metadata_dict = dict(metadata)
    return metadata_dict.get(_VERSION_METADATA_KEY)
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
@enum.unique
class ChannelCredsType(enum.Enum):
    """Selectable kinds of gRPC channel credentials."""

    LOCAL = "local"
    SSL = "ssl"
    SSL_DEV = "ssl_dev"

    def channel_config(self):
        """Create channel credentials and options.

        Returns:
          A tuple `(channel_creds, channel_options)`, where `channel_creds`
          is a `grpc.ChannelCredentials` and `channel_options` is a
          (potentially empty) list of `(key, value)` tuples. Both results
          may be passed to `grpc.secure_channel`.
        """
        channel_options = []
        if self is ChannelCredsType.LOCAL:
            channel_creds = grpc.local_channel_credentials()
        elif self is ChannelCredsType.SSL:
            channel_creds = grpc.ssl_channel_credentials()
        elif self is ChannelCredsType.SSL_DEV:
            # Configure the dev cert to use by passing the environment variable
            # GRPC_DEFAULT_SSL_ROOTS_FILE_PATH=path/to/cert.crt
            channel_creds = grpc.ssl_channel_credentials()
            channel_options.append(
                ("grpc.ssl_target_name_override", "localhost")
            )
        else:
            raise AssertionError("unhandled ChannelCredsType: %r" % self)
        return (channel_creds, channel_options)

    @classmethod
    def choices(cls):
        # All members, in declaration order, for use as flag choices.
        return cls.__members__.values()

    def __str__(self):
        # Use user-facing string, because this is shown for flag choices.
        return self.value
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# Copyright 2025 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""Utility to determine the MIME type of an image."""
|
|
16
|
+
|
|
17
|
+
from PIL import Image
|
|
18
|
+
import io
|
|
19
|
+
|
|
20
|
+
# Map from a Pillow image format name (lowercased) to its MIME type.
_IMGHDR_TO_MIMETYPE = {
    "bmp": "image/bmp",
    "gif": "image/gif",
    "jpeg": "image/jpeg",
    "png": "image/png",
}
# Returned when the format is unrecognized or the bytes cannot be parsed.
_DEFAULT_IMAGE_MIMETYPE = "application/octet-stream"


def from_bytes(img_bytes: bytes) -> str:
    """Returns the MIME type of an image from its bytes.

    Args:
      img_bytes: Raw encoded image data.

    Returns:
      The MIME type string for a recognized bmp/gif/jpeg/png image, or
      `"application/octet-stream"` if the data cannot be identified.
    """
    format_lower = None
    try:
        img = Image.open(io.BytesIO(img_bytes))
        format_lower = img.format.lower()
        if format_lower == "jpg":
            format_lower = "jpeg"
    except Exception:
        # Was a bare `except:`, which would also swallow KeyboardInterrupt
        # and SystemExit; catch Exception so only parsing failures fall
        # through to the default MIME type.
        pass
    return _IMGHDR_TO_MIMETYPE.get(format_lower, _DEFAULT_IMAGE_MIMETYPE)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""TensorBoard IO helpers."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def IsCloudPath(path):
    """Checks whether a given path is a Cloud filesystem path.

    Args:
      path: A path string (e.g. "gs://bucket/run" or "/tmp/logs").

    Returns:
      True if the path points at Google Cloud Storage ("gs://") or
      Amazon S3 ("s3://"), False otherwise.
    """
    # str.startswith accepts a tuple of prefixes: one call replaces the
    # `or`-chained pair of checks.
    return path.startswith(("gs://", "s3://"))
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ==============================================================================
|
|
15
|
+
"""Provides a lazy wrapper for deferring Tensor creation."""
|
|
16
|
+
|
|
17
|
+
import threading
|
|
18
|
+
|
|
19
|
+
from tensorbored.compat import tf2 as tf
|
|
20
|
+
|
|
21
|
+
# Sentinel used for LazyTensorCreator._tensor to indicate that a value is
# currently being computed, in order to fail hard on reentrancy. A fresh
# object() is used because its identity can never collide with a real
# tensor value or with None.
_CALL_IN_PROGRESS_SENTINEL = object()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class LazyTensorCreator:
    """Lazy auto-converting wrapper for a callable that returns a `tf.Tensor`.

    This class wraps an arbitrary callable that returns a `Tensor` so that it
    will be automatically converted to a `Tensor` by any logic that calls
    `tf.convert_to_tensor()`. This also memoizes the callable so that it is
    called at most once.

    The intended use of this class is to defer the construction of a `Tensor`
    (e.g. to avoid unnecessary wasted computation, or ensure any new ops are
    created in a context only available later on in execution), while remaining
    compatible with APIs that expect to be given an already materialized value
    that can be converted to a `Tensor`.

    This class is thread-safe.
    """

    def __init__(self, tensor_callable):
        """Initializes a LazyTensorCreator object.

        Args:
          tensor_callable: A callable that returns a `tf.Tensor`.

        Raises:
          ValueError: If `tensor_callable` is not callable.
        """
        if not callable(tensor_callable):
            raise ValueError("Not a callable: %r" % tensor_callable)
        self._tensor_callable = tensor_callable
        # None: not yet computed; _CALL_IN_PROGRESS_SENTINEL: computation in
        # flight on this thread; anything else: the memoized result.
        self._tensor = None
        # RLock so that the reentrancy check below can distinguish "same
        # thread re-entered via the callable" from ordinary lock contention.
        self._tensor_lock = threading.RLock()
        _register_conversion_function_once()

    def __call__(self):
        """Returns the memoized `tf.Tensor`, invoking the callable if needed.

        Raises:
          RuntimeError: If the wrapped callable reentrantly attempts to
            convert this object while its own invocation is still in flight.
        """
        # Unlocked fast-path read; a fully computed tensor is only ever
        # published under the lock, so a stale read just falls through to
        # the locked slow path.
        if self._tensor is None or self._tensor is _CALL_IN_PROGRESS_SENTINEL:
            with self._tensor_lock:
                if self._tensor is _CALL_IN_PROGRESS_SENTINEL:
                    # The lock is reentrant, so reaching here means the
                    # callable itself triggered another conversion.
                    raise RuntimeError(
                        "Cannot use LazyTensorCreator with reentrant callable"
                    )
                elif self._tensor is None:
                    self._tensor = _CALL_IN_PROGRESS_SENTINEL
                    try:
                        self._tensor = self._tensor_callable()
                    finally:
                        # If the callable raised, clear the sentinel so a
                        # later call retries (and surfaces the real error)
                        # instead of being misreported as reentrancy.
                        if self._tensor is _CALL_IN_PROGRESS_SENTINEL:
                            self._tensor = None
        return self._tensor
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _lazy_tensor_creator_converter(value, dtype=None, name=None, as_ref=False):
    """Tensor conversion function for `LazyTensorCreator` values.

    Matches the signature required by `tf.register_tensor_conversion_function`.
    The `name` argument is accepted for interface compatibility but unused.

    Raises:
      RuntimeError: If `value` is not a `LazyTensorCreator`, if a ref tensor
        is requested, or if the produced tensor's dtype conflicts with the
        requested `dtype`.
    """
    del name  # ignored
    if not isinstance(value, LazyTensorCreator):
        raise RuntimeError("Expected LazyTensorCreator, got %r" % value)
    if as_ref:
        raise RuntimeError("Cannot use LazyTensorCreator to create ref tensor")
    # Force materialization of the underlying tensor (memoized by the wrapper).
    tensor = value()
    if dtype is not None and dtype != tensor.dtype:
        raise RuntimeError(
            "Cannot convert LazyTensorCreator returning dtype %s to dtype %s"
            % (tensor.dtype, dtype)
        )
    return tensor
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# Use module-level bit and lock to ensure that registration of the
# LazyTensorCreator conversion function happens only once. The flag is
# double-checked under the lock by _register_conversion_function_once().
_conversion_registered = False
_conversion_registered_lock = threading.Lock()
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def _register_conversion_function_once():
    """Performs one-time registration of `_lazy_tensor_creator_converter`.

    This helper can be invoked multiple times but only registers the conversion
    function on the first invocation, making it suitable for calling when
    constructing a LazyTensorCreator.

    Deferring the registration is necessary because doing it at module import
    time would trigger the lazy TensorFlow import to resolve, and that in turn
    would break the delicate `tf.summary` import cycle avoidance scheme.
    """
    global _conversion_registered
    # Unlocked fast path: once set, the flag never reverts.
    if _conversion_registered:
        return
    with _conversion_registered_lock:
        # Double-check under the lock in case another thread won the race.
        if _conversion_registered:
            return
        _conversion_registered = True
        tf.register_tensor_conversion_function(
            base_type=LazyTensorCreator,
            conversion_func=_lazy_tensor_creator_converter,
            priority=0,
        )
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""TensorBoard helper routine for TF op evaluator.
|
|
16
|
+
|
|
17
|
+
Requires TensorFlow.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import threading
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class PersistentOpEvaluator:
    """Evaluate a fixed TensorFlow graph repeatedly, safely, efficiently.

    Extend this class to create a particular kind of op evaluator, like an
    image encoder. In `initialize_graph`, create an appropriate TensorFlow
    graph with placeholder inputs. In `run`, evaluate this graph and
    return its result. This class will manage a singleton graph and
    session to preserve memory usage, and will ensure that this graph and
    session do not interfere with other concurrent sessions.

    A subclass of this class offers a threadsafe, highly parallel Python
    entry point for evaluating a particular TensorFlow graph.

    Example usage:

        class FluxCapacitanceEvaluator(PersistentOpEvaluator):
            \"\"\"Compute the flux capacitance required for a system.

            Arguments:
              x: Available power input, as a `float`, in jigawatts.

            Returns:
              A `float`, in nanofarads.
            \"\"\"

            def initialize_graph(self):
                self._placeholder = tf.placeholder(some_dtype)
                self._op = some_op(self._placeholder)

            def run(self, x):
                return self._op.eval(feed_dict={self._placeholder: x})

        evaluate_flux_capacitance = FluxCapacitanceEvaluator()

        for x in xs:
            evaluate_flux_capacitance(x)
    """

    def __init__(self):
        super().__init__()
        # Session (and its graph) are created lazily on first call, guarded
        # by _initialization_lock; None until then.
        self._session = None
        self._initialization_lock = threading.Lock()

    def _lazily_initialize(self):
        """Initialize the graph and session, if this has not yet been done."""
        # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
        import tensorflow.compat.v1 as tf

        with self._initialization_lock:
            # Already initialized by a previous (possibly concurrent) call.
            if self._session:
                return
            graph = tf.Graph()
            with graph.as_default():
                # Subclass hook: writes ops into `graph` via the default-graph
                # context established above.
                self.initialize_graph()
            # Don't reserve GPU because libpng can't run on GPU.
            config = tf.ConfigProto(device_count={"GPU": 0})
            self._session = tf.Session(graph=graph, config=config)

    def initialize_graph(self):
        """Create the TensorFlow graph needed to compute this operation.

        This should write ops to the default graph and return `None`.
        """
        raise NotImplementedError(
            'Subclasses must implement "initialize_graph".'
        )

    def run(self, *args, **kwargs):
        """Evaluate the ops with the given input.

        When this function is called, the default session will have the
        graph defined by a previous call to `initialize_graph`. This
        function should evaluate any ops necessary to compute the result
        of the query for the given *args and **kwargs, likely returning
        the result of a call to `some_op.eval(...)`.
        """
        raise NotImplementedError('Subclasses must implement "run".')

    def __call__(self, *args, **kwargs):
        # Ensure the singleton graph/session exist, then make the session the
        # default so subclass `run` implementations can use `op.eval(...)`.
        self._lazily_initialize()
        with self._session.as_default():
            return self.run(*args, **kwargs)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""TensorBoard helper routine for platform."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def readahead_file_path(path, unused_readahead=None):
    """Return `path` unchanged; readahead hints are not implemented.

    Args:
      path: The file path to pass through.
      unused_readahead: Ignored; accepted only for interface compatibility.

    Returns:
      The `path` argument, unmodified.
    """
    del unused_readahead  # Present solely for API compatibility.
    return path
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
"""TensorBoard logging module."""
|
|
16
|
+
|
|
17
|
+
import logging
|
|
18
|
+
|
|
19
|
+
# Single logger instance shared by every caller of get_logger(); the
# "tensorboard" name keeps the logging namespace stable for consumers.
_logger = logging.getLogger("tensorboard")


def get_logger():
    """Return the shared logger for the "tensorboard" namespace."""
    return _logger
|