tensorbored-2.21.0rc1769983804-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- tensorbored/__init__.py +112 -0
- tensorbored/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/__init__.py +125 -0
- tensorbored/_vendor/bleach/_vendor/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/__init__.py +35 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_ihatexml.py +289 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_inputstream.py +918 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_tokenizer.py +1735 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/__init__.py +5 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/_base.py +40 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_trie/py.py +67 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/_utils.py +159 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/constants.py +2946 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/__init__.py +0 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/alphabeticalattributes.py +29 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/base.py +12 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/inject_meta_charset.py +73 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/lint.py +93 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/optionaltags.py +207 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/sanitizer.py +916 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/filters/whitespace.py +38 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/html5parser.py +2795 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/serializer.py +409 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/__init__.py +30 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/genshi.py +54 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treeadapters/sax.py +50 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/__init__.py +88 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/base.py +417 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/dom.py +239 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree.py +343 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treebuilders/etree_lxml.py +392 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/__init__.py +154 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/base.py +252 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/dom.py +43 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree.py +131 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/etree_lxml.py +215 -0
- tensorbored/_vendor/bleach/_vendor/html5lib/treewalkers/genshi.py +69 -0
- tensorbored/_vendor/bleach/_vendor/parse.py +1078 -0
- tensorbored/_vendor/bleach/callbacks.py +32 -0
- tensorbored/_vendor/bleach/html5lib_shim.py +757 -0
- tensorbored/_vendor/bleach/linkifier.py +633 -0
- tensorbored/_vendor/bleach/parse_shim.py +1 -0
- tensorbored/_vendor/bleach/sanitizer.py +638 -0
- tensorbored/_vendor/bleach/six_shim.py +19 -0
- tensorbored/_vendor/webencodings/__init__.py +342 -0
- tensorbored/_vendor/webencodings/labels.py +231 -0
- tensorbored/_vendor/webencodings/mklabels.py +59 -0
- tensorbored/_vendor/webencodings/x_user_defined.py +325 -0
- tensorbored/assets.py +36 -0
- tensorbored/auth.py +102 -0
- tensorbored/backend/__init__.py +0 -0
- tensorbored/backend/application.py +604 -0
- tensorbored/backend/auth_context_middleware.py +38 -0
- tensorbored/backend/client_feature_flags.py +113 -0
- tensorbored/backend/empty_path_redirect.py +46 -0
- tensorbored/backend/event_processing/__init__.py +0 -0
- tensorbored/backend/event_processing/data_ingester.py +276 -0
- tensorbored/backend/event_processing/data_provider.py +535 -0
- tensorbored/backend/event_processing/directory_loader.py +142 -0
- tensorbored/backend/event_processing/directory_watcher.py +272 -0
- tensorbored/backend/event_processing/event_accumulator.py +950 -0
- tensorbored/backend/event_processing/event_file_inspector.py +463 -0
- tensorbored/backend/event_processing/event_file_loader.py +292 -0
- tensorbored/backend/event_processing/event_multiplexer.py +521 -0
- tensorbored/backend/event_processing/event_util.py +68 -0
- tensorbored/backend/event_processing/io_wrapper.py +223 -0
- tensorbored/backend/event_processing/plugin_asset_util.py +104 -0
- tensorbored/backend/event_processing/plugin_event_accumulator.py +721 -0
- tensorbored/backend/event_processing/plugin_event_multiplexer.py +522 -0
- tensorbored/backend/event_processing/reservoir.py +266 -0
- tensorbored/backend/event_processing/tag_types.py +29 -0
- tensorbored/backend/experiment_id.py +71 -0
- tensorbored/backend/experimental_plugin.py +51 -0
- tensorbored/backend/http_util.py +263 -0
- tensorbored/backend/json_util.py +70 -0
- tensorbored/backend/path_prefix.py +67 -0
- tensorbored/backend/process_graph.py +74 -0
- tensorbored/backend/security_validator.py +202 -0
- tensorbored/compat/__init__.py +69 -0
- tensorbored/compat/proto/__init__.py +0 -0
- tensorbored/compat/proto/allocation_description_pb2.py +35 -0
- tensorbored/compat/proto/api_def_pb2.py +82 -0
- tensorbored/compat/proto/attr_value_pb2.py +80 -0
- tensorbored/compat/proto/cluster_pb2.py +58 -0
- tensorbored/compat/proto/config_pb2.py +271 -0
- tensorbored/compat/proto/coordination_config_pb2.py +45 -0
- tensorbored/compat/proto/cost_graph_pb2.py +87 -0
- tensorbored/compat/proto/cpp_shape_inference_pb2.py +70 -0
- tensorbored/compat/proto/debug_pb2.py +65 -0
- tensorbored/compat/proto/event_pb2.py +149 -0
- tensorbored/compat/proto/full_type_pb2.py +74 -0
- tensorbored/compat/proto/function_pb2.py +157 -0
- tensorbored/compat/proto/graph_debug_info_pb2.py +111 -0
- tensorbored/compat/proto/graph_pb2.py +41 -0
- tensorbored/compat/proto/histogram_pb2.py +39 -0
- tensorbored/compat/proto/meta_graph_pb2.py +254 -0
- tensorbored/compat/proto/node_def_pb2.py +61 -0
- tensorbored/compat/proto/op_def_pb2.py +81 -0
- tensorbored/compat/proto/resource_handle_pb2.py +48 -0
- tensorbored/compat/proto/rewriter_config_pb2.py +93 -0
- tensorbored/compat/proto/rpc_options_pb2.py +35 -0
- tensorbored/compat/proto/saved_object_graph_pb2.py +193 -0
- tensorbored/compat/proto/saver_pb2.py +38 -0
- tensorbored/compat/proto/step_stats_pb2.py +116 -0
- tensorbored/compat/proto/struct_pb2.py +144 -0
- tensorbored/compat/proto/summary_pb2.py +111 -0
- tensorbored/compat/proto/tensor_description_pb2.py +38 -0
- tensorbored/compat/proto/tensor_pb2.py +68 -0
- tensorbored/compat/proto/tensor_shape_pb2.py +46 -0
- tensorbored/compat/proto/tfprof_log_pb2.py +307 -0
- tensorbored/compat/proto/trackable_object_graph_pb2.py +90 -0
- tensorbored/compat/proto/types_pb2.py +105 -0
- tensorbored/compat/proto/variable_pb2.py +62 -0
- tensorbored/compat/proto/verifier_config_pb2.py +38 -0
- tensorbored/compat/proto/versions_pb2.py +35 -0
- tensorbored/compat/tensorflow_stub/__init__.py +38 -0
- tensorbored/compat/tensorflow_stub/app.py +124 -0
- tensorbored/compat/tensorflow_stub/compat/__init__.py +131 -0
- tensorbored/compat/tensorflow_stub/compat/v1/__init__.py +20 -0
- tensorbored/compat/tensorflow_stub/dtypes.py +692 -0
- tensorbored/compat/tensorflow_stub/error_codes.py +169 -0
- tensorbored/compat/tensorflow_stub/errors.py +507 -0
- tensorbored/compat/tensorflow_stub/flags.py +124 -0
- tensorbored/compat/tensorflow_stub/io/__init__.py +17 -0
- tensorbored/compat/tensorflow_stub/io/gfile.py +1011 -0
- tensorbored/compat/tensorflow_stub/pywrap_tensorflow.py +285 -0
- tensorbored/compat/tensorflow_stub/tensor_shape.py +1035 -0
- tensorbored/context.py +129 -0
- tensorbored/data/__init__.py +0 -0
- tensorbored/data/grpc_provider.py +365 -0
- tensorbored/data/ingester.py +46 -0
- tensorbored/data/proto/__init__.py +0 -0
- tensorbored/data/proto/data_provider_pb2.py +517 -0
- tensorbored/data/proto/data_provider_pb2_grpc.py +374 -0
- tensorbored/data/provider.py +1365 -0
- tensorbored/data/server_ingester.py +301 -0
- tensorbored/data_compat.py +159 -0
- tensorbored/dataclass_compat.py +224 -0
- tensorbored/default.py +124 -0
- tensorbored/errors.py +130 -0
- tensorbored/lazy.py +99 -0
- tensorbored/main.py +48 -0
- tensorbored/main_lib.py +62 -0
- tensorbored/manager.py +487 -0
- tensorbored/notebook.py +441 -0
- tensorbored/plugin_util.py +266 -0
- tensorbored/plugins/__init__.py +0 -0
- tensorbored/plugins/audio/__init__.py +0 -0
- tensorbored/plugins/audio/audio_plugin.py +229 -0
- tensorbored/plugins/audio/metadata.py +69 -0
- tensorbored/plugins/audio/plugin_data_pb2.py +37 -0
- tensorbored/plugins/audio/summary.py +230 -0
- tensorbored/plugins/audio/summary_v2.py +124 -0
- tensorbored/plugins/base_plugin.py +367 -0
- tensorbored/plugins/core/__init__.py +0 -0
- tensorbored/plugins/core/core_plugin.py +981 -0
- tensorbored/plugins/custom_scalar/__init__.py +0 -0
- tensorbored/plugins/custom_scalar/custom_scalars_plugin.py +320 -0
- tensorbored/plugins/custom_scalar/layout_pb2.py +85 -0
- tensorbored/plugins/custom_scalar/metadata.py +35 -0
- tensorbored/plugins/custom_scalar/summary.py +79 -0
- tensorbored/plugins/debugger_v2/__init__.py +0 -0
- tensorbored/plugins/debugger_v2/debug_data_multiplexer.py +631 -0
- tensorbored/plugins/debugger_v2/debug_data_provider.py +634 -0
- tensorbored/plugins/debugger_v2/debugger_v2_plugin.py +504 -0
- tensorbored/plugins/distribution/__init__.py +0 -0
- tensorbored/plugins/distribution/compressor.py +158 -0
- tensorbored/plugins/distribution/distributions_plugin.py +116 -0
- tensorbored/plugins/distribution/metadata.py +19 -0
- tensorbored/plugins/graph/__init__.py +0 -0
- tensorbored/plugins/graph/graph_util.py +129 -0
- tensorbored/plugins/graph/graphs_plugin.py +336 -0
- tensorbored/plugins/graph/keras_util.py +328 -0
- tensorbored/plugins/graph/metadata.py +42 -0
- tensorbored/plugins/histogram/__init__.py +0 -0
- tensorbored/plugins/histogram/histograms_plugin.py +144 -0
- tensorbored/plugins/histogram/metadata.py +63 -0
- tensorbored/plugins/histogram/plugin_data_pb2.py +34 -0
- tensorbored/plugins/histogram/summary.py +234 -0
- tensorbored/plugins/histogram/summary_v2.py +292 -0
- tensorbored/plugins/hparams/__init__.py +14 -0
- tensorbored/plugins/hparams/_keras.py +93 -0
- tensorbored/plugins/hparams/api.py +130 -0
- tensorbored/plugins/hparams/api_pb2.py +208 -0
- tensorbored/plugins/hparams/backend_context.py +606 -0
- tensorbored/plugins/hparams/download_data.py +158 -0
- tensorbored/plugins/hparams/error.py +26 -0
- tensorbored/plugins/hparams/get_experiment.py +71 -0
- tensorbored/plugins/hparams/hparams_plugin.py +206 -0
- tensorbored/plugins/hparams/hparams_util_pb2.py +69 -0
- tensorbored/plugins/hparams/json_format_compat.py +38 -0
- tensorbored/plugins/hparams/list_metric_evals.py +57 -0
- tensorbored/plugins/hparams/list_session_groups.py +1040 -0
- tensorbored/plugins/hparams/metadata.py +125 -0
- tensorbored/plugins/hparams/metrics.py +41 -0
- tensorbored/plugins/hparams/plugin_data_pb2.py +69 -0
- tensorbored/plugins/hparams/summary.py +205 -0
- tensorbored/plugins/hparams/summary_v2.py +597 -0
- tensorbored/plugins/image/__init__.py +0 -0
- tensorbored/plugins/image/images_plugin.py +232 -0
- tensorbored/plugins/image/metadata.py +65 -0
- tensorbored/plugins/image/plugin_data_pb2.py +34 -0
- tensorbored/plugins/image/summary.py +159 -0
- tensorbored/plugins/image/summary_v2.py +130 -0
- tensorbored/plugins/mesh/__init__.py +14 -0
- tensorbored/plugins/mesh/mesh_plugin.py +292 -0
- tensorbored/plugins/mesh/metadata.py +152 -0
- tensorbored/plugins/mesh/plugin_data_pb2.py +37 -0
- tensorbored/plugins/mesh/summary.py +251 -0
- tensorbored/plugins/mesh/summary_v2.py +214 -0
- tensorbored/plugins/metrics/__init__.py +0 -0
- tensorbored/plugins/metrics/metadata.py +17 -0
- tensorbored/plugins/metrics/metrics_plugin.py +623 -0
- tensorbored/plugins/pr_curve/__init__.py +0 -0
- tensorbored/plugins/pr_curve/metadata.py +75 -0
- tensorbored/plugins/pr_curve/plugin_data_pb2.py +34 -0
- tensorbored/plugins/pr_curve/pr_curves_plugin.py +241 -0
- tensorbored/plugins/pr_curve/summary.py +574 -0
- tensorbored/plugins/profile_redirect/__init__.py +0 -0
- tensorbored/plugins/profile_redirect/profile_redirect_plugin.py +49 -0
- tensorbored/plugins/projector/__init__.py +67 -0
- tensorbored/plugins/projector/metadata.py +26 -0
- tensorbored/plugins/projector/projector_config_pb2.py +54 -0
- tensorbored/plugins/projector/projector_plugin.py +795 -0
- tensorbored/plugins/projector/tf_projector_plugin/index.js +32 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.html +524 -0
- tensorbored/plugins/projector/tf_projector_plugin/projector_binary.js +15536 -0
- tensorbored/plugins/scalar/__init__.py +0 -0
- tensorbored/plugins/scalar/metadata.py +60 -0
- tensorbored/plugins/scalar/plugin_data_pb2.py +34 -0
- tensorbored/plugins/scalar/scalars_plugin.py +181 -0
- tensorbored/plugins/scalar/summary.py +109 -0
- tensorbored/plugins/scalar/summary_v2.py +124 -0
- tensorbored/plugins/text/__init__.py +0 -0
- tensorbored/plugins/text/metadata.py +62 -0
- tensorbored/plugins/text/plugin_data_pb2.py +34 -0
- tensorbored/plugins/text/summary.py +114 -0
- tensorbored/plugins/text/summary_v2.py +124 -0
- tensorbored/plugins/text/text_plugin.py +288 -0
- tensorbored/plugins/wit_redirect/__init__.py +0 -0
- tensorbored/plugins/wit_redirect/wit_redirect_plugin.py +49 -0
- tensorbored/program.py +910 -0
- tensorbored/summary/__init__.py +35 -0
- tensorbored/summary/_output.py +124 -0
- tensorbored/summary/_tf/__init__.py +14 -0
- tensorbored/summary/_tf/summary/__init__.py +178 -0
- tensorbored/summary/_writer.py +105 -0
- tensorbored/summary/v1.py +51 -0
- tensorbored/summary/v2.py +25 -0
- tensorbored/summary/writer/__init__.py +13 -0
- tensorbored/summary/writer/event_file_writer.py +291 -0
- tensorbored/summary/writer/record_writer.py +50 -0
- tensorbored/util/__init__.py +0 -0
- tensorbored/util/encoder.py +116 -0
- tensorbored/util/grpc_util.py +311 -0
- tensorbored/util/img_mime_type_detector.py +40 -0
- tensorbored/util/io_util.py +20 -0
- tensorbored/util/lazy_tensor_creator.py +110 -0
- tensorbored/util/op_evaluator.py +104 -0
- tensorbored/util/platform_util.py +20 -0
- tensorbored/util/tb_logging.py +24 -0
- tensorbored/util/tensor_util.py +617 -0
- tensorbored/util/timing.py +122 -0
- tensorbored/version.py +21 -0
- tensorbored/webfiles.zip +0 -0
- tensorbored-2.21.0rc1769983804.dist-info/METADATA +49 -0
- tensorbored-2.21.0rc1769983804.dist-info/RECORD +271 -0
- tensorbored-2.21.0rc1769983804.dist-info/WHEEL +5 -0
- tensorbored-2.21.0rc1769983804.dist-info/entry_points.txt +6 -0
- tensorbored-2.21.0rc1769983804.dist-info/licenses/LICENSE +739 -0
- tensorbored-2.21.0rc1769983804.dist-info/top_level.txt +1 -0
tensorbored/summary/writer/event_file_writer.py
@@ -0,0 +1,291 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Writes events to disk in a logdir."""

import os
import queue
import socket
import threading
import time

from tensorbored.compat import tf
from tensorbored.compat.proto import event_pb2
from tensorbored.summary.writer.record_writer import RecordWriter


class AtomicCounter:
    def __init__(self, initial_value):
        self._value = initial_value
        self._lock = threading.Lock()

    def get(self):
        with self._lock:
            try:
                return self._value
            finally:
                self._value += 1


_global_uid = AtomicCounter(0)


class EventFileWriter:
    """Writes `Event` protocol buffers to an event file.

    The `EventFileWriter` class creates an event file in the specified
    directory, and asynchronously writes Event protocol buffers to the
    file. The Event file is encoded using the tfrecord format, which is
    similar to RecordIO.
    """

    def __init__(
        self, logdir, max_queue_size=10, flush_secs=120, filename_suffix=""
    ):
        """Creates a `EventFileWriter` and an event file to write to.

        On construction the summary writer creates a new event file in `logdir`.
        This event file will contain `Event` protocol buffers, which are written to
        disk via the add_event method.
        The other arguments to the constructor control the asynchronous writes to
        the event file:

        Args:
          logdir: A string. Directory where event file will be written.
          max_queue_size: Integer. Size of the queue for pending events and summaries.
          flush_secs: Number. How often, in seconds, to flush the
            pending events and summaries to disk.
        """
        self._logdir = logdir
        tf.io.gfile.makedirs(logdir)
        self._file_name = (
            os.path.join(
                logdir,
                "events.out.tfevents.%010d.%s.%s.%s"
                % (
                    time.time(),
                    socket.gethostname(),
                    os.getpid(),
                    _global_uid.get(),
                ),
            )
            + filename_suffix
        )  # noqa E128
        self._general_file_writer = tf.io.gfile.GFile(self._file_name, "wb")
        self._async_writer = _AsyncWriter(
            RecordWriter(self._general_file_writer), max_queue_size, flush_secs
        )

        # Initialize an event instance.
        _event = event_pb2.Event(
            wall_time=time.time(),
            file_version="brain.Event:2",
            source_metadata=event_pb2.SourceMetadata(
                writer="tensorbored.summary.writer.event_file_writer"
            ),
        )
        self.add_event(_event)
        self.flush()

    def get_logdir(self):
        """Returns the directory where event file will be written."""
        return self._logdir

    def add_event(self, event):
        """Adds an event to the event file.

        Args:
          event: An `Event` protocol buffer.
        """
        if not isinstance(event, event_pb2.Event):
            raise TypeError(
                "Expected an event_pb2.Event proto, "
                " but got %s" % type(event)
            )
        self._async_writer.write(event.SerializeToString())

    def flush(self):
        """Flushes the event file to disk.

        Call this method to make sure that all pending events have been
        written to disk.
        """
        self._async_writer.flush()

    def close(self):
        """Performs a final flush of the event file to disk, stops the
        write/flush worker and closes the file.

        Call this method when you do not need the summary writer
        anymore.
        """
        self._async_writer.close()


class _AsyncWriter:
    """Writes bytes to a file."""

    def __init__(self, record_writer, max_queue_size=20, flush_secs=120):
        """Writes bytes to a file asynchronously. An instance of this class
        holds a queue to keep the incoming data temporarily. Data passed to the
        `write` function will be put to the queue and the function returns
        immediately. This class also maintains a thread to write data in the
        queue to disk. The first initialization parameter is an instance of
        `tensorboard.summary.record_writer` which computes the CRC checksum and
        then write the combined result to the disk. So we use an async approach
        to improve performance.

        Args:
          record_writer: A RecordWriter instance
          max_queue_size: Integer. Size of the queue for pending bytestrings.
          flush_secs: Number. How often, in seconds, to flush the
            pending bytestrings to disk.
        """
        self._writer = record_writer
        self._closed = False
        self._byte_queue = queue.Queue(max_queue_size)
        self._worker = _AsyncWriterThread(
            self._byte_queue, self._writer, flush_secs
        )
        self._lock = threading.Lock()
        self._worker.start()

    def write(self, bytestring):
        """Enqueue the given bytes to be written asychronously."""
        with self._lock:
            # Status of the worker should be checked under the lock to avoid
            # multiple threads passing the check and then switching just before
            # blocking on putting to the queue which might result in a deadlock.
            self._check_worker_status()
            if self._closed:
                raise IOError("Writer is closed")
            self._byte_queue.put(bytestring)
            # Check the status again in case the background worker thread has
            # failed in the meantime to avoid waiting until the next call to
            # surface the error.
            self._check_worker_status()

    def flush(self):
        """Write all the enqueued bytestring before this flush call to disk.

        Block until all the above bytestring are written.
        """
        with self._lock:
            self._check_worker_status()
            if self._closed:
                raise IOError("Writer is closed")
            self._byte_queue.join()
            self._writer.flush()
            # Check the status again in case the background worker thread has
            # failed in the meantime to avoid waiting until the next call to
            # surface the error.
            self._check_worker_status()

    def close(self):
        """Closes the underlying writer, flushing any pending writes first."""
        if not self._closed:
            with self._lock:
                if not self._closed:
                    self._closed = True
                    self._worker.stop()
                    self._writer.flush()
                    self._writer.close()

    def _check_worker_status(self):
        """Makes sure the worker thread is still running and raises exception
        thrown in the worker thread otherwise.
        """
        exception = self._worker.exception
        if exception is not None:
            raise exception


class _AsyncWriterThread(threading.Thread):
    """Thread that processes asynchronous writes for _AsyncWriter."""

    def __init__(self, queue, record_writer, flush_secs):
        """Creates an _AsyncWriterThread.

        Args:
          queue: A Queue from which to dequeue data.
          record_writer: An instance of record_writer writer.
          flush_secs: How often, in seconds, to flush the
            pending file to disk.
        """
        threading.Thread.__init__(self)
        self.daemon = True
        self.exception = None
        self._queue = queue
        self._record_writer = record_writer
        self._flush_secs = flush_secs
        # The first data will be flushed immediately.
        self._next_flush_time = 0
        self._has_pending_data = False
        self._shutdown_signal = object()

    def stop(self):
        self._queue.put(self._shutdown_signal)
        self.join()

    def run(self):
        try:
            self._run()
        except Exception as ex:
            self.exception = ex
            try:
                # In case there's a thread blocked on putting an item into the
                # queue or a thread blocked on flushing, pop all items from the
                # queue to let the foreground thread proceed.
                while True:
                    self._queue.get(False)
                    self._queue.task_done()
            except queue.Empty:
                pass
            raise

    def _run(self):
        # Here wait on the queue until an data appears, or till the next
        # time to flush the writer, whichever is earlier. If we have an
        # data, write it. If not, an empty queue exception will be raised
        # and we can proceed to flush the writer.
        while True:
            now = time.time()
            queue_wait_duration = self._next_flush_time - now
            data = None
            try:
                if queue_wait_duration > 0:
                    data = self._queue.get(True, queue_wait_duration)
                else:
                    data = self._queue.get(False)

                if data is self._shutdown_signal:
                    return
                self._record_writer.write(data)
                self._has_pending_data = True
            except queue.Empty:
                pass
            finally:
                if data:
                    self._queue.task_done()

            now = time.time()
            if now > self._next_flush_time:
                if self._has_pending_data:
                    # Small optimization - if there are no pending data,
                    # there's no need to flush, since each flush can be
                    # expensive (e.g. uploading a new file to a server).
                    self._record_writer.flush()
                    self._has_pending_data = False
                # Do it again in flush_secs.
                self._next_flush_time = now + self._flush_secs
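For orientation, the sketch below shows how the writer above might be driven. It is not part of the package contents; it assumes the wheel is installed, and the logdir path and flush interval are illustrative. Only the constructor arguments and methods visible in the diff are used.

from tensorbored.compat.proto import event_pb2
from tensorbored.summary.writer.event_file_writer import EventFileWriter

# Creating the writer immediately creates an
# events.out.tfevents.<time>.<host>.<pid>.<uid> file under the logdir
# and enqueues the initial "brain.Event:2" file-version event.
writer = EventFileWriter("/tmp/demo_logs", max_queue_size=10, flush_secs=5)

# add_event() only accepts event_pb2.Event protos; anything else raises TypeError.
writer.add_event(event_pb2.Event(wall_time=0.0, step=1))

writer.flush()   # block until the queued events have reached the RecordWriter
writer.close()   # final flush, stop the worker thread, close the file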
tensorbored/summary/writer/record_writer.py
@@ -0,0 +1,50 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

import struct
from tensorbored.compat.tensorflow_stub.pywrap_tensorflow import masked_crc32c


class RecordWriter:
    """Write encoded protobuf to a file with packing defined in tensorflow."""

    def __init__(self, writer):
        """Open a file to keep the tensorboard records.

        Args:
          writer: A file-like object that implements `write`, `flush` and `close`.
        """
        self._writer = writer

    # Format of a single record: (little-endian)
    # uint64 length
    # uint32 masked crc of length
    # byte   data[length]
    # uint32 masked crc of data
    def write(self, data):
        header = struct.pack("<Q", len(data))
        header_crc = struct.pack("<I", masked_crc32c(header))
        footer_crc = struct.pack("<I", masked_crc32c(data))
        self._writer.write(header + header_crc + data + footer_crc)

    def flush(self):
        self._writer.flush()

    def close(self):
        self._writer.close()

    @property
    def closed(self):
        return self._writer.closed
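The framing comment above translates directly into bytes. The sketch below, not part of the package, exercises it against an in-memory buffer instead of a real event file; `masked_crc32c` is imported from the package's TensorFlow stub exactly as in the diff, and everything else is standard library.

import io
import struct

from tensorbored.compat.tensorflow_stub.pywrap_tensorflow import masked_crc32c
from tensorbored.summary.writer.record_writer import RecordWriter

buf = io.BytesIO()
RecordWriter(buf).write(b"payload")
raw = buf.getvalue()

# <uint64 length> <uint32 masked crc of length> <data> <uint32 masked crc of data>
(length,) = struct.unpack("<Q", raw[:8])
(length_crc,) = struct.unpack("<I", raw[8:12])
data = raw[12:12 + length]
(data_crc,) = struct.unpack("<I", raw[12 + length:])

assert data == b"payload"
assert length_crc == masked_crc32c(raw[:8])  # CRC is over the packed length
assert data_crc == masked_crc32c(data)       # CRC is over the payload bytes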
tensorbored/util/__init__.py
File without changes
tensorbored/util/encoder.py
@@ -0,0 +1,116 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""TensorBoard encoder helper module.

Encoder depends on TensorFlow.
"""

import numpy as np

from tensorbored.util import op_evaluator


class _TensorFlowPngEncoder(op_evaluator.PersistentOpEvaluator):
    """Encode an image to PNG.

    This function is thread-safe, and has high performance when run in
    parallel. See `encode_png_benchmark.py` for details.

    Arguments:
      image: A numpy array of shape `[height, width, channels]`, where
        `channels` is 1, 3, or 4, and of dtype uint8.

    Returns:
      A bytestring with PNG-encoded data.
    """

    def __init__(self):
        super().__init__()
        self._image_placeholder = None
        self._encode_op = None

    def initialize_graph(self):
        # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
        import tensorflow.compat.v1 as tf

        self._image_placeholder = tf.placeholder(
            dtype=tf.uint8, name="image_to_encode"
        )
        self._encode_op = tf.image.encode_png(self._image_placeholder)

    def run(self, image):  # pylint: disable=arguments-differ
        if not isinstance(image, np.ndarray):
            raise ValueError("'image' must be a numpy array: %r" % image)
        if image.dtype != np.uint8:
            raise ValueError(
                "'image' dtype must be uint8, but is %r" % image.dtype
            )
        return self._encode_op.eval(feed_dict={self._image_placeholder: image})


encode_png = _TensorFlowPngEncoder()


class _TensorFlowWavEncoder(op_evaluator.PersistentOpEvaluator):
    """Encode an audio clip to WAV.

    This function is thread-safe and exhibits good parallel performance.

    Arguments:
      audio: A numpy array of shape `[samples, channels]`.
      samples_per_second: A positive `int`, in Hz.

    Returns:
      A bytestring with WAV-encoded data.
    """

    def __init__(self):
        super().__init__()
        self._audio_placeholder = None
        self._samples_per_second_placeholder = None
        self._encode_op = None

    def initialize_graph(self):
        # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
        import tensorflow.compat.v1 as tf

        self._audio_placeholder = tf.placeholder(
            dtype=tf.float32, name="image_to_encode"
        )
        self._samples_per_second_placeholder = tf.placeholder(
            dtype=tf.int32, name="samples_per_second"
        )
        self._encode_op = tf.audio.encode_wav(
            self._audio_placeholder,
            sample_rate=self._samples_per_second_placeholder,
        )

    def run(
        self, audio, samples_per_second
    ):  # pylint: disable=arguments-differ
        if not isinstance(audio, np.ndarray):
            raise ValueError("'audio' must be a numpy array: %r" % audio)
        if not isinstance(samples_per_second, int):
            raise ValueError(
                "'samples_per_second' must be an int: %r" % samples_per_second
            )
        feed_dict = {
            self._audio_placeholder: audio,
            self._samples_per_second_placeholder: samples_per_second,
        }
        return self._encode_op.eval(feed_dict=feed_dict)


encode_wav = _TensorFlowWavEncoder()
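A possible way to exercise the module-level `encode_png` callable above, again not part of the package: it assumes a real TensorFlow installation (the encoder lazily builds a `tf.compat.v1` graph on first use) and it assumes that `op_evaluator.PersistentOpEvaluator`, whose source is not shown in this diff, makes its instances callable as it does in upstream TensorBoard.

import numpy as np

from tensorbored.util.encoder import encode_png

# A 4x4 black RGB image; dtype must be uint8 or run() raises ValueError.
image = np.zeros((4, 4, 3), dtype=np.uint8)

# Assumes PersistentOpEvaluator instances are callable and dispatch to run().
png_bytes = encode_png(image)
assert png_bytes[:8] == b"\x89PNG\r\n\x1a\n"  # PNG signature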