Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions tensorboard/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -498,6 +498,7 @@ py_library(
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
deps = [
"//tensorboard/backend:process_graph",
"//tensorboard/compat/proto:protos_all_py_pb2",
"//tensorboard/plugins/graph:metadata",
"//tensorboard/plugins/histogram:metadata",
Expand Down
4 changes: 4 additions & 0 deletions tensorboard/backend/process_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,10 @@ def prepare_graph_for_ui(
ValueError: If `large_attrs_key is None` while `limit_attr_size != None`.
ValueError: If `limit_attr_size` is defined, but <= 0.
"""
# TODO(@davidsoergel): detect whether a graph has been filtered already
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

To make this TODO item clearer, I believe you can say in addition: "if it is already filtered, return immediately".

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done.

# (to a limit_attr_size <= what is requested here). If it is already
# filtered, return immediately.

# Check input for validity.
if limit_attr_size is not None:
if large_attrs_key is None:
Expand Down
23 changes: 20 additions & 3 deletions tensorboard/dataclass_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,10 @@
from __future__ import division
from __future__ import print_function


from tensorboard.backend import process_graph
from tensorboard.compat.proto import event_pb2
from tensorboard.compat.proto import graph_pb2
from tensorboard.compat.proto import summary_pb2
from tensorboard.compat.proto import types_pb2
from tensorboard.plugins.graph import metadata as graphs_metadata
Expand All @@ -37,30 +40,44 @@
from tensorboard.util import tensor_util


def migrate_event(event):
def migrate_event(event, experimental_filter_graph=False):
    """Migrate an event to a sequence of events.

    Args:
      event: An `event_pb2.Event`. The caller transfers ownership of the
        event to this method; the event may be mutated, and may or may
        not appear in the returned sequence.
      experimental_filter_graph: When a graph event is encountered, process the
        GraphDef to filter out attributes that are too large to be shown in the
        graph UI.

    Returns:
      A sequence of `event_pb2.Event`s to use instead of `event`.
    """
    # Dispatch on the event's payload; anything unrecognized passes through
    # unchanged as a one-element sequence.
    if event.HasField("graph_def"):
        return _migrate_graph_event(
            event, experimental_filter_graph=experimental_filter_graph
        )
    elif event.HasField("summary"):
        return _migrate_summary_event(event)
    else:
        return (event,)


def _migrate_graph_event(old_event):
def _migrate_graph_event(old_event, experimental_filter_graph=False):
result = event_pb2.Event()
result.wall_time = old_event.wall_time
result.step = old_event.step
value = result.summary.value.add(tag=graphs_metadata.RUN_GRAPH_NAME)
graph_bytes = old_event.graph_def

# TODO(@davidsoergel): Move this stopgap to a more appropriate place.
if experimental_filter_graph:
graph_def = graph_pb2.GraphDef().FromString(graph_bytes)
# Use the default filter parameters:
# limit_attr_size=1024, large_attrs_key="_too_large_attrs"
process_graph.prepare_graph_for_ui(graph_def)
graph_bytes = graph_def.SerializeToString()

value.tensor.CopyFrom(tensor_util.make_tensor_proto([graph_bytes]))
value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
# `value.metadata.plugin_data.content` left as the empty proto
Expand Down
45 changes: 43 additions & 2 deletions tensorboard/dataclass_compat_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from tensorboard.backend.event_processing import event_file_loader
from tensorboard.compat.proto import event_pb2
from tensorboard.compat.proto import graph_pb2
from tensorboard.compat.proto import node_def_pb2
from tensorboard.compat.proto import summary_pb2
from tensorboard.plugins.graph import metadata as graphs_metadata
from tensorboard.plugins.histogram import metadata as histogram_metadata
Expand All @@ -42,11 +43,13 @@
class MigrateEventTest(tf.test.TestCase):
"""Tests for `migrate_event`."""

def _migrate_event(self, old_event):
def _migrate_event(self, old_event, experimental_filter_graph=False):
"""Like `migrate_event`, but performs some sanity checks."""
old_event_copy = event_pb2.Event()
old_event_copy.CopyFrom(old_event)
new_events = dataclass_compat.migrate_event(old_event)
new_events = dataclass_compat.migrate_event(
old_event, experimental_filter_graph
)
for event in new_events: # ensure that wall time and step are preserved
self.assertEqual(event.wall_time, old_event.wall_time)
self.assertEqual(event.step, old_event.step)
Expand Down Expand Up @@ -212,6 +215,44 @@ def test_graph_def(self):

self.assertProtoEquals(graph_def, new_graph_def)

def test_graph_def_experimental_filter_graph(self):
    """Graph migration with filtering drops oversize node attributes."""
    # Build a small graph whose second node carries one attribute under
    # the 1024-byte default limit and one well over it.
    original = graph_pb2.GraphDef()
    original.node.add(name="alice", op="Person")
    bob = original.node.add(name="bob", op="Person")
    bob.attr["small"].s = b"small_attr_value"
    bob.attr["large"].s = b"large_attr_value" * 100  # 1600 bytes > 1024 limit
    original.node.add(
        name="friendship", op="Friendship", input=["alice", "bob"]
    )

    # Wrap the graph in a legacy-style event.
    old_event = event_pb2.Event(
        step=0, wall_time=456.75, graph_def=original.SerializeToString()
    )

    migrated = self._migrate_event(old_event, experimental_filter_graph=True)

    # Pull the serialized graph back out of the migrated summary tensor.
    tensor_value = migrated[1].summary.value[0].tensor
    serialized = tensor_util.make_ndarray(tensor_value)[0]
    actual = graph_pb2.GraphDef.FromString(serialized)

    # The "large" attribute should be replaced by an entry in the
    # "_too_large_attrs" marker attribute; everything else is untouched.
    expected = graph_pb2.GraphDef()
    expected.CopyFrom(original)
    del expected.node[1].attr["large"]
    expected.node[1].attr["_too_large_attrs"].list.s.append(b"large")

    self.assertProtoEquals(expected, actual)


# Standard TensorFlow test entry point: run all tests in this module.
if __name__ == "__main__":
tf.test.main()
4 changes: 3 additions & 1 deletion tensorboard/uploader/uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,9 @@ def _run_values(self, run_to_events):
for (run_name, events) in six.iteritems(run_to_events):
for event in events:
v2_event = data_compat.migrate_event(event)
dataclass_events = dataclass_compat.migrate_event(v2_event)
dataclass_events = dataclass_compat.migrate_event(
v2_event, experimental_filter_graph=True
)
for dataclass_event in dataclass_events:
if dataclass_event.summary:
for value in dataclass_event.summary.value:
Expand Down