Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions tensorboard/data/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,7 @@ py_library(
"//tensorboard:expect_grpc_installed",
"//tensorboard/data/proto:protos_all_py_pb2",
"//tensorboard/data/proto:protos_all_py_pb2_grpc",
"//tensorboard/util:tensor_util",
"//tensorboard/util:timing",
],
)
Expand All @@ -141,8 +142,10 @@ py_test(
"//tensorboard:errors",
"//tensorboard:expect_grpc_installed",
"//tensorboard:expect_grpc_testing_installed",
"//tensorboard:expect_numpy_installed",
"//tensorboard:test",
"//tensorboard/data/proto:protos_all_py_pb2",
"//tensorboard/data/proto:protos_all_py_pb2_grpc",
"//tensorboard/util:tensor_util",
],
)
66 changes: 66 additions & 0 deletions tensorboard/data/grpc_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

import grpc

from tensorboard.util import tensor_util
from tensorboard.util import timing
from tensorboard import errors
from tensorboard.data import provider
Expand Down Expand Up @@ -134,6 +135,71 @@ def read_scalars(
series.append(point)
return result

@timing.log_latency
def list_tensors(
    self, ctx, *, experiment_id, plugin_name, run_tag_filter=None
):
    """List tensor time series available for one experiment.

    Sends a `ListTensorsRequest` to the backing gRPC data server and
    converts the response into provider-level values.

    Returns:
      A nested dict: run name -> tag name -> `provider.TensorTimeSeries`.
    """
    with timing.log_latency("build request"):
        req = data_provider_pb2.ListTensorsRequest()
        req.experiment_id = experiment_id
        req.plugin_filter.plugin_name = plugin_name
        _populate_rtf(run_tag_filter, req.run_tag_filter)
    with timing.log_latency("_stub.ListTensors"):
        with _translate_grpc_error():
            res = self._stub.ListTensors(req)
    with timing.log_latency("build result"):
        result = {}
        for run_entry in res.runs:
            tag_map = {}
            for tag_entry in run_entry.tags:
                md = tag_entry.metadata
                summary_md = md.summary_metadata
                tag_map[tag_entry.tag_name] = provider.TensorTimeSeries(
                    max_step=md.max_step,
                    max_wall_time=md.max_wall_time,
                    plugin_content=summary_md.plugin_data.content,
                    description=summary_md.summary_description,
                    display_name=summary_md.display_name,
                )
            result[run_entry.run_name] = tag_map
    return result

@timing.log_latency
def read_tensors(
    self,
    ctx,
    *,
    experiment_id,
    plugin_name,
    downsample=None,
    run_tag_filter=None,
):
    """Read tensor data for one experiment.

    Sends a `ReadTensorsRequest` to the backing gRPC data server and
    converts each response point into a `provider.TensorDatum` whose
    `numpy` field is decoded with `tensor_util.make_ndarray`.

    Returns:
      A nested dict: run name -> tag name -> list of
      `provider.TensorDatum`, in response order.

    Raises:
      TypeError: If `downsample` is not given; it is required even
        though the signature gives it a `None` default for interface
        compatibility.
    """
    with timing.log_latency("build request"):
        req = data_provider_pb2.ReadTensorsRequest()
        req.experiment_id = experiment_id
        req.plugin_filter.plugin_name = plugin_name
        _populate_rtf(run_tag_filter, req.run_tag_filter)
        # Fail fast with a clear message: assigning `None` to the
        # proto int field below would raise an opaque protobuf error.
        if downsample is None:
            raise TypeError("`downsample` required but not given")
        req.downsample.num_points = downsample
    with timing.log_latency("_stub.ReadTensors"):
        with _translate_grpc_error():
            res = self._stub.ReadTensors(req)
    with timing.log_latency("build result"):
        result = {}
        for run_entry in res.runs:
            tags = {}
            result[run_entry.run_name] = tags
            for tag_entry in run_entry.tags:
                series = []
                tags[tag_entry.tag_name] = series
                d = tag_entry.data
                for (step, wt, value) in zip(d.step, d.wall_time, d.value):
                    point = provider.TensorDatum(
                        step=step,
                        wall_time=wt,
                        numpy=tensor_util.make_ndarray(value),
                    )
                    series.append(point)
    return result

@timing.log_latency
def list_blob_sequences(
self, ctx, experiment_id, plugin_name, run_tag_filter=None
Expand Down
111 changes: 110 additions & 1 deletion tensorboard/data/grpc_provider_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

import grpc
import grpc_testing
import numpy as np

from tensorboard import errors
from tensorboard import test as tb_test
Expand All @@ -25,6 +26,7 @@
from tensorboard.data import provider
from tensorboard.data.proto import data_provider_pb2
from tensorboard.data.proto import data_provider_pb2_grpc
from tensorboard.util import tensor_util


def _create_mock_client():
Expand Down Expand Up @@ -54,10 +56,11 @@ def test_list_plugins(self):
res = data_provider_pb2.ListPluginsResponse()
res.plugins.add(name="scalars")
res.plugins.add(name="images")
res.plugins.add(name="text")
self.stub.ListPlugins.return_value = res

actual = self.provider.list_plugins(self.ctx, experiment_id="123")
self.assertEqual(actual, ["scalars", "images"])
self.assertEqual(actual, ["scalars", "images", "text"])

req = data_provider_pb2.ListPluginsRequest()
req.experiment_id = "123"
Expand Down Expand Up @@ -174,6 +177,112 @@ def test_read_scalars(self):
req.downsample.num_points = 4
self.stub.ReadScalars.assert_called_once_with(req)

def test_list_tensors(self):
    """list_tensors converts a ListTensorsResponse into nested dicts."""
    res = data_provider_pb2.ListTensorsResponse()
    val_run = res.runs.add(run_name="val")
    weights_tag = val_run.tags.add(tag_name="weights")
    weights_tag.metadata.max_step = 7
    weights_tag.metadata.max_wall_time = 7.77
    weights_tag.metadata.summary_metadata.plugin_data.content = b"magic"
    weights_tag.metadata.summary_metadata.summary_description = "hey"
    other_tag = val_run.tags.add(tag_name="other")
    other_tag.metadata.max_step = 8
    other_tag.metadata.max_wall_time = 8.88
    test_run = res.runs.add(run_name="test")
    test_weights_tag = test_run.tags.add(tag_name="weights")
    test_weights_tag.metadata.max_step = 9
    test_weights_tag.metadata.max_wall_time = 9.99
    self.stub.ListTensors.return_value = res

    actual = self.provider.list_tensors(
        self.ctx,
        experiment_id="123",
        plugin_name="histograms",
        run_tag_filter=provider.RunTagFilter(tags=["weights", "other"]),
    )
    # Unset proto fields surface as their defaults (b"", "").
    expected = {
        "val": {
            "weights": provider.TensorTimeSeries(
                max_step=7,
                max_wall_time=7.77,
                plugin_content=b"magic",
                description="hey",
                display_name="",
            ),
            "other": provider.TensorTimeSeries(
                max_step=8,
                max_wall_time=8.88,
                plugin_content=b"",
                description="",
                display_name="",
            ),
        },
        "test": {
            "weights": provider.TensorTimeSeries(
                max_step=9,
                max_wall_time=9.99,
                plugin_content=b"",
                description="",
                display_name="",
            ),
        },
    }
    self.assertEqual(actual, expected)

    req = data_provider_pb2.ListTensorsRequest()
    req.experiment_id = "123"
    req.plugin_filter.plugin_name = "histograms"
    req.run_tag_filter.tags.names.extend(["other", "weights"])  # sorted
    self.stub.ListTensors.assert_called_once_with(req)

def test_read_tensors(self):
    """read_tensors decodes response tensors into TensorDatum series."""
    res = data_provider_pb2.ReadTensorsResponse()
    run = res.runs.add(run_name="test")
    tag = run.tags.add(tag_name="weights")
    vectors = [
        [0.0, 0.0, 42.0],
        [1.0, 1.0, 43.0],
        [2.0, 2.0, 44.0],
    ]
    for step, vec in enumerate(vectors):
        tag.data.step.append(step)
        tag.data.wall_time.append(1234.0 + step)
        tag.data.value.append(tensor_util.make_tensor_proto(vec))
    self.stub.ReadTensors.return_value = res

    actual = self.provider.read_tensors(
        self.ctx,
        experiment_id="123",
        plugin_name="histograms",
        run_tag_filter=provider.RunTagFilter(runs=["test", "nope"]),
        downsample=3,
    )
    expected = {
        "test": {
            "weights": [
                provider.TensorDatum(
                    step=step,
                    wall_time=1234.0 + step,
                    numpy=np.array(vec),
                )
                for step, vec in enumerate(vectors)
            ],
        },
    }
    self.assertEqual(actual, expected)

    req = data_provider_pb2.ReadTensorsRequest()
    req.experiment_id = "123"
    req.plugin_filter.plugin_name = "histograms"
    req.run_tag_filter.runs.names.extend(["nope", "test"])  # sorted
    req.downsample.num_points = 3
    self.stub.ReadTensors.assert_called_once_with(req)

def test_list_blob_sequences(self):
res = data_provider_pb2.ListBlobSequencesResponse()
run1 = res.runs.add(run_name="train")
Expand Down
Loading