diff --git a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py
index f67df572d5..4ff04174d3 100644
--- a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py
+++ b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py
@@ -682,10 +682,15 @@ def FakeScalarSummary(tag, value):
         self.assertTagsEqual(
             acc.Tags(),
             {
-                ea.TENSORS: [graph_metadata.RUN_GRAPH_NAME, "id", "sq"],
+                ea.TENSORS: [
+                    graph_metadata.RUN_GRAPH_NAME,
+                    "id",
+                    "sq",
+                    "test run",
+                ],
                 ea.GRAPH: True,
                 ea.META_GRAPH: True,
-                ea.RUN_METADATA: ["test run"],
+                ea.RUN_METADATA: [],
             },
         )
         id_events = acc.Tensors("id")
diff --git a/tensorboard/dataclass_compat.py b/tensorboard/dataclass_compat.py
index 676c3f2d07..3249b6f954 100644
--- a/tensorboard/dataclass_compat.py
+++ b/tensorboard/dataclass_compat.py
@@ -54,9 +54,12 @@ def migrate_event(event, initial_metadata):
     Returns:
       A sequence of `event_pb2.Event`s to use instead of `event`.
     """
-    if event.HasField("graph_def"):
+    what = event.WhichOneof("what")
+    if what == "graph_def":
         return _migrate_graph_event(event)
-    if event.HasField("summary"):
+    if what == "tagged_run_metadata":
+        return _migrate_tagged_run_metadata_event(event)
+    if what == "summary":
         return _migrate_summary_event(event, initial_metadata)
     return (event,)
 
@@ -69,13 +72,28 @@ def _migrate_graph_event(old_event):
     graph_bytes = old_event.graph_def
     value.tensor.CopyFrom(tensor_util.make_tensor_proto([graph_bytes]))
     value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
-    # `value.metadata.plugin_data.content` left as the empty proto
+    # `value.metadata.plugin_data.content` left empty
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
-    # In the short term, keep both the old event and the new event to
-    # maintain compatibility.
+    # As long as the graphs plugin still reads the old format, keep both
+    # the old event and the new event to maintain compatibility.
     return (old_event, result)
 
 
+def _migrate_tagged_run_metadata_event(old_event):
+    result = event_pb2.Event()
+    result.wall_time = old_event.wall_time
+    result.step = old_event.step
+    trm = old_event.tagged_run_metadata
+    value = result.summary.value.add(tag=trm.tag)
+    value.tensor.CopyFrom(tensor_util.make_tensor_proto([trm.run_metadata]))
+    value.metadata.plugin_data.plugin_name = (
+        graphs_metadata.PLUGIN_NAME_TAGGED_RUN_METADATA
+    )
+    # `value.metadata.plugin_data.content` left empty
+    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
+    return (result,)
+
+
 def _migrate_summary_event(event, initial_metadata):
     values = event.summary.value
     new_values = [
diff --git a/tensorboard/dataclass_compat_test.py b/tensorboard/dataclass_compat_test.py
index 4e73878ceb..d176899705 100644
--- a/tensorboard/dataclass_compat_test.py
+++ b/tensorboard/dataclass_compat_test.py
@@ -345,6 +345,31 @@ def test_graph_def(self):
 
         self.assertProtoEquals(graph_def, new_graph_def)
 
+    def test_run_metadata(self):
+        old_event = event_pb2.Event()
+        old_event.step = 123
+        old_event.wall_time = 456.75
+        rm = tf.compat.v1.RunMetadata()
+        rm.step_stats.dev_stats.add(device="CPU:0")
+        rm.step_stats.dev_stats.add(device="CPU:1")
+        old_event.tagged_run_metadata.tag = "step0"
+        old_event.tagged_run_metadata.run_metadata = rm.SerializeToString()
+
+        new_events = self._migrate_event(old_event)
+        self.assertLen(new_events, 1)
+        self.assertLen(new_events[0].summary.value, 1)
+        value = new_events[0].summary.value[0]
+        tensor = tensor_util.make_ndarray(value.tensor)
+        self.assertEqual(tensor.shape, (1,))
+        self.assertEqual(tensor.item(), rm.SerializeToString())
+        self.assertEqual(
+            value.metadata.data_class, summary_pb2.DATA_CLASS_BLOB_SEQUENCE
+        )
+        self.assertEqual(
+            value.metadata.plugin_data.plugin_name,
+            graphs_metadata.PLUGIN_NAME_TAGGED_RUN_METADATA,
+        )
+
 
 if __name__ == "__main__":
     tf.test.main()
diff --git a/tensorboard/plugins/graph/BUILD b/tensorboard/plugins/graph/BUILD
index 32bfdfda67..6197d6afbc 100644
--- a/tensorboard/plugins/graph/BUILD
+++ b/tensorboard/plugins/graph/BUILD
@@ -14,11 +14,9 @@ py_library(
         ":graph_util",
         ":keras_util",
         ":metadata",
-        "//tensorboard:context",
         "//tensorboard:plugin_util",
         "//tensorboard/backend:http_util",
         "//tensorboard/backend:process_graph",
-        "//tensorboard/backend/event_processing:tag_types",
         "//tensorboard/data:provider",
         "//tensorboard/plugins:base_plugin",
         "@com_google_protobuf//:protobuf_python",
diff --git a/tensorboard/plugins/graph/graphs_plugin.py b/tensorboard/plugins/graph/graphs_plugin.py
index 2e8cc1eb16..4b27a55b9f 100644
--- a/tensorboard/plugins/graph/graphs_plugin.py
+++ b/tensorboard/plugins/graph/graphs_plugin.py
@@ -23,10 +23,8 @@
 from werkzeug import wrappers
 
 from tensorboard import plugin_util
-from tensorboard import context
 from tensorboard.backend import http_util
 from tensorboard.backend import process_graph
-from tensorboard.backend.event_processing import tag_types
 from tensorboard.compat.proto import config_pb2
 from tensorboard.compat.proto import graph_pb2
 from tensorboard.data import provider
@@ -38,16 +36,6 @@
 
 logger = tb_logging.get_logger()
 
-# The Summary API is implemented in TensorFlow because it uses TensorFlow internal APIs.
-# As a result, this SummaryMetadata is a bit unconventional and uses non-public
-# hardcoded name as the plugin name. Please refer to link below for the summary ops.
-# https://github.com/tensorflow/tensorflow/blob/11f4ecb54708865ec757ca64e4805957b05d7570/tensorflow/python/ops/summary_ops_v2.py#L757
-_PLUGIN_NAME_RUN_METADATA = "graph_run_metadata"
-# https://github.com/tensorflow/tensorflow/blob/11f4ecb54708865ec757ca64e4805957b05d7570/tensorflow/python/ops/summary_ops_v2.py#L788
-_PLUGIN_NAME_RUN_METADATA_WITH_GRAPH = "graph_run_metadata_graph"
-# https://github.com/tensorflow/tensorflow/blob/565952cc2f17fdfd995e25171cf07be0f6f06180/tensorflow/python/ops/summary_ops_v2.py#L825
-_PLUGIN_NAME_KERAS_MODEL = "graph_keras_model"
-
 
 class GraphsPlugin(base_plugin.TBPlugin):
     """Graphs Plugin for TensorBoard."""
@@ -77,11 +65,16 @@ def get_plugin_apps(self):
 
     def is_active(self):
         """The graphs plugin is active iff any run has a graph or metadata."""
-        if self._data_provider:
-            return False  # `list_plugins` as called by TB core suffices
-
-        empty_context = context.RequestContext()  # not used
-        return bool(self.info_impl(empty_context))
+        return False  # `list_plugins` as called by TB core suffices
+
+    def data_plugin_names(self):
+        return (
+            metadata.PLUGIN_NAME,
+            metadata.PLUGIN_NAME_RUN_METADATA,
+            metadata.PLUGIN_NAME_RUN_METADATA_WITH_GRAPH,
+            metadata.PLUGIN_NAME_KERAS_MODEL,
+            metadata.PLUGIN_NAME_TAGGED_RUN_METADATA,
+        )
 
     def frontend_metadata(self):
         return base_plugin.FrontendMetadata(
@@ -136,7 +129,7 @@ def add_row_item(run, tag=None):
             return result
 
         mapping = self._multiplexer.PluginRunToTagToContent(
-            _PLUGIN_NAME_RUN_METADATA_WITH_GRAPH
+            metadata.PLUGIN_NAME_RUN_METADATA_WITH_GRAPH
         )
         for run_name, tag_to_content in six.iteritems(mapping):
             for (tag, content) in six.iteritems(tag_to_content):
@@ -151,10 +144,10 @@ def add_row_item(run, tag=None):
                 (_, tag_item) = add_row_item(run_name, tag)
                 tag_item["op_graph"] = True
 
-        # Tensors associated with plugin name _PLUGIN_NAME_RUN_METADATA contain
-        # both op graph and profile information.
+        # Tensors associated with plugin name metadata.PLUGIN_NAME_RUN_METADATA
+        # contain both op graph and profile information.
         mapping = self._multiplexer.PluginRunToTagToContent(
-            _PLUGIN_NAME_RUN_METADATA
+            metadata.PLUGIN_NAME_RUN_METADATA
         )
         for run_name, tag_to_content in six.iteritems(mapping):
             for (tag, content) in six.iteritems(tag_to_content):
@@ -167,10 +160,10 @@ def add_row_item(run, tag=None):
             tag_item["profile"] = True
             tag_item["op_graph"] = True
 
-        # Tensors associated with plugin name _PLUGIN_NAME_KERAS_MODEL contain
-        # serialized Keras model in JSON format.
+        # Tensors associated with plugin name metadata.PLUGIN_NAME_KERAS_MODEL
+        # contain serialized Keras model in JSON format.
         mapping = self._multiplexer.PluginRunToTagToContent(
-            _PLUGIN_NAME_KERAS_MODEL
+            metadata.PLUGIN_NAME_KERAS_MODEL
         )
         for run_name, tag_to_content in six.iteritems(mapping):
             for (tag, content) in six.iteritems(tag_to_content):
@@ -182,16 +175,22 @@ def add_row_item(run, tag=None):
                 (_, tag_item) = add_row_item(run_name, tag)
                 tag_item["conceptual_graph"] = True
 
-        for (run_name, run_data) in six.iteritems(self._multiplexer.Runs()):
-            if run_data.get(tag_types.GRAPH):
+        mapping = self._multiplexer.PluginRunToTagToContent(
+            metadata.PLUGIN_NAME
+        )
+        for (run_name, tags) in mapping.items():
+            if metadata.RUN_GRAPH_NAME in tags:
                 (run_item, _) = add_row_item(run_name, None)
                 run_item["run_graph"] = True
 
-        for (run_name, run_data) in six.iteritems(self._multiplexer.Runs()):
-            if tag_types.RUN_METADATA in run_data:
-                for tag in run_data[tag_types.RUN_METADATA]:
-                    (_, tag_item) = add_row_item(run_name, tag)
-                    tag_item["profile"] = True
+        # Top level `Event.tagged_run_metadata` represents profile data only.
+        mapping = self._multiplexer.PluginRunToTagToContent(
+            metadata.PLUGIN_NAME_TAGGED_RUN_METADATA
+        )
+        for (run_name, tags) in mapping.items():
+            for tag in tags:
+                (_, tag_item) = add_row_item(run_name, tag)
+                tag_item["profile"] = True
 
         return result
 
@@ -253,8 +252,13 @@ def graph_impl(
                     for func_graph in run_metadata.function_graphs
                 ]
             )
+        else:
-            graph = self._multiplexer.Graph(run)
+            tensor_events = self._multiplexer.Tensors(
+                run, metadata.RUN_GRAPH_NAME
+            )
+            graph_raw = tensor_events[0].tensor_proto.string_val[0]
+            graph = graph_pb2.GraphDef.FromString(graph_raw)
 
         # This next line might raise a ValueError if the limit parameters
         # are invalid (size is negative, size present but key absent, etc.).
@@ -269,21 +273,14 @@ def run_metadata_impl(self, run, tag):
         if self._data_provider:
             # TODO(davidsoergel, wchargin): Consider plumbing run metadata through data providers.
             return None
-        try:
-            run_metadata = self._multiplexer.RunMetadata(run, tag)
-        except ValueError:
-            # TODO(stephanwlee): Should include whether FE is fetching for v1 or v2 RunMetadata
-            # so we can remove this try/except.
-            tensor_events = self._multiplexer.Tensors(run, tag)
-            if tensor_events is None:
-                return None
-            # Take the first event if there are multiple events written from different
-            # steps.
-            run_metadata = config_pb2.RunMetadata.FromString(
-                tensor_events[0].tensor_proto.string_val[0]
-            )
-        if run_metadata is None:
+        tensor_events = self._multiplexer.Tensors(run, tag)
+        if tensor_events is None:
             return None
+        # Take the first event if there are multiple events written from different
+        # steps.
+        run_metadata = config_pb2.RunMetadata.FromString(
+            tensor_events[0].tensor_proto.string_val[0]
+        )
         return (str(run_metadata), "text/x-protobuf")  # pbtxt
 
     @wrappers.Request.application
diff --git a/tensorboard/plugins/graph/graphs_plugin_test.py b/tensorboard/plugins/graph/graphs_plugin_test.py
index cd552810bb..a8dc86c4dd 100644
--- a/tensorboard/plugins/graph/graphs_plugin_test.py
+++ b/tensorboard/plugins/graph/graphs_plugin_test.py
@@ -308,28 +308,7 @@ def test_run_metadata(self, plugin):
         # If it parses, we're happy.
 
     @with_runs([_RUN_WITH_GRAPH_WITHOUT_METADATA])
-    def test_is_active_with_graph_without_run_metadata(self, plugin):
-        if plugin._data_provider:
-            self.assertFalse(plugin.is_active())
-        else:
-            self.assertTrue(plugin.is_active())
-
-    @with_runs([_RUN_WITHOUT_GRAPH_WITH_METADATA])
-    def test_is_active_without_graph_with_run_metadata(self, plugin):
-        if plugin._data_provider:
-            self.assertFalse(plugin.is_active())
-        else:
-            self.assertTrue(plugin.is_active())
-
-    @with_runs([_RUN_WITH_GRAPH_WITH_METADATA])
-    def test_is_active_with_both(self, plugin):
-        if plugin._data_provider:
-            self.assertFalse(plugin.is_active())
-        else:
-            self.assertTrue(plugin.is_active())
-
-    @with_runs([_RUN_WITHOUT_GRAPH_WITHOUT_METADATA])
-    def test_is_inactive_without_both(self, plugin):
+    def test_is_active(self, plugin):
         self.assertFalse(plugin.is_active())
 
 
diff --git a/tensorboard/plugins/graph/metadata.py b/tensorboard/plugins/graph/metadata.py
index 25b7b6051e..23ee2eadd1 100644
--- a/tensorboard/plugins/graph/metadata.py
+++ b/tensorboard/plugins/graph/metadata.py
@@ -19,11 +19,23 @@
 from __future__ import print_function
 
 # This name is used as the plugin prefix route and to identify this plugin
-# generally.
-# Note however that different 'plugin names' are used in the context of
-# graph Summaries.
-# See `graphs_plugin.py` for details.
+# generally, and is also the `plugin_name` for run graphs after data-compat
+# transformations.
 PLUGIN_NAME = "graphs"
+# The Summary API is implemented in TensorFlow because it uses TensorFlow internal APIs.
+# As a result, this SummaryMetadata is a bit unconventional and uses non-public
+# hardcoded name as the plugin name. Please refer to link below for the summary ops.
+# https://github.com/tensorflow/tensorflow/blob/11f4ecb54708865ec757ca64e4805957b05d7570/tensorflow/python/ops/summary_ops_v2.py#L757
+PLUGIN_NAME_RUN_METADATA = "graph_run_metadata"
+# https://github.com/tensorflow/tensorflow/blob/11f4ecb54708865ec757ca64e4805957b05d7570/tensorflow/python/ops/summary_ops_v2.py#L788
+PLUGIN_NAME_RUN_METADATA_WITH_GRAPH = "graph_run_metadata_graph"
+# https://github.com/tensorflow/tensorflow/blob/565952cc2f17fdfd995e25171cf07be0f6f06180/tensorflow/python/ops/summary_ops_v2.py#L825
+PLUGIN_NAME_KERAS_MODEL = "graph_keras_model"
+# Plugin name used for `Event.tagged_run_metadata`. This doesn't fall into one
+# of the above cases because (despite the name) `PLUGIN_NAME_RUN_METADATA` is
+# _required_ to have both profile and op graphs, whereas tagged run metadata
+# need only have profile data.
+PLUGIN_NAME_TAGGED_RUN_METADATA = "graph_tagged_run_metadata"
 
 # In the context of the data provider interface, tag name given to a
 # graph read from the `graph_def` field of an `Event` proto, which is