@@ -57,7 +57,7 @@ def _get_first_event_timestamp(self, run_name):
5757 return None
5858
def data_location(self, experiment_id):
    """Describe where this provider's data lives.

    Args:
      experiment_id: Ignored; this provider serves a single local logdir.

    Returns:
      The logdir path as a string.
    """
    del experiment_id  # ignored
    return str(self._logdir)
6262
6363 def list_runs (self , experiment_id ):
@@ -72,8 +72,69 @@ def list_runs(self, experiment_id):
7272 ]
7373
def list_scalars(self, experiment_id, plugin_name, run_tag_filter=None):
    """List metadata about scalar time series for a plugin.

    Args:
      experiment_id: Ignored; this provider serves a single local logdir.
      plugin_name: Name of the plugin whose summaries to list.
      run_tag_filter: Optional `provider.RunTagFilter`; `None` means no
        filtering.

    Returns:
      A nested dict `d[run][tag]` of `provider.ScalarTimeSeries`.
    """
    run_tag_content = self._multiplexer.PluginRunToTagToContent(plugin_name)
    return self._list(
        provider.ScalarTimeSeries, run_tag_content, run_tag_filter
    )
79+
def read_scalars(
    self, experiment_id, plugin_name, downsample=None, run_tag_filter=None
):
    """Read scalar data for a plugin.

    Args:
      experiment_id: Forwarded to `list_scalars`.
      plugin_name: Name of the plugin whose summaries to read.
      downsample: Ignored for now (see TODO below).
      run_tag_filter: Optional `provider.RunTagFilter`; `None` means no
        filtering.

    Returns:
      A nested dict `d[run][tag]` of lists of `provider.ScalarDatum`.
    """
    # TODO(@wchargin): Downsampling not implemented, as the multiplexer
    # is already downsampled. We could downsample on top of the existing
    # sampling, which would be nice for testing.
    del downsample  # ignored for now
    index = self.list_scalars(
        experiment_id, plugin_name, run_tag_filter=run_tag_filter
    )

    def convert_scalar_event(event):
        # `.item()` unwraps the rank-0 ndarray into a native Python scalar.
        return provider.ScalarDatum(
            step=event.step,
            wall_time=event.wall_time,
            value=tensor_util.make_ndarray(event.tensor_proto).item(),
        )

    return self._read(convert_scalar_event, index)
99+
def list_tensors(self, experiment_id, plugin_name, run_tag_filter=None):
    """List metadata about tensor time series for a plugin.

    Args:
      experiment_id: Ignored; this provider serves a single local logdir.
      plugin_name: Name of the plugin whose summaries to list.
      run_tag_filter: Optional `provider.RunTagFilter`; `None` means no
        filtering.

    Returns:
      A nested dict `d[run][tag]` of `provider.TensorTimeSeries`.
    """
    run_tag_content = self._multiplexer.PluginRunToTagToContent(plugin_name)
    return self._list(
        provider.TensorTimeSeries, run_tag_content, run_tag_filter
    )
105+
def read_tensors(
    self, experiment_id, plugin_name, downsample=None, run_tag_filter=None
):
    """Read tensor data for a plugin.

    Args:
      experiment_id: Forwarded to `list_tensors`.
      plugin_name: Name of the plugin whose summaries to read.
      downsample: Ignored for now (see TODO below).
      run_tag_filter: Optional `provider.RunTagFilter`; `None` means no
        filtering.

    Returns:
      A nested dict `d[run][tag]` of lists of `provider.TensorDatum`.
    """
    # TODO(@wchargin): Downsampling not implemented, as the multiplexer
    # is already downsampled. We could downsample on top of the existing
    # sampling, which would be nice for testing.
    del downsample  # ignored for now
    index = self.list_tensors(
        experiment_id, plugin_name, run_tag_filter=run_tag_filter
    )

    def convert_tensor_event(event):
        return provider.TensorDatum(
            step=event.step,
            wall_time=event.wall_time,
            numpy=tensor_util.make_ndarray(event.tensor_proto),
        )

    return self._read(convert_tensor_event, index)
125+
126+ def _list (self , construct_time_series , run_tag_content , run_tag_filter ):
127+ """Helper to list scalar or tensor time series.
128+
129+ Args:
130+ construct_time_series: `ScalarTimeSeries` or `TensorTimeSeries`.
131+ run_tag_content: Result of `_multiplexer.PluginRunToTagToContent(...)`.
132+ run_tag_filter: As given by the client; may be `None`.
133+
134+ Returns:
135+ A list of objects of type given by `construct_time_series`,
136+ suitable to be returned from `list_scalars` or `list_tensors`.
137+ """
77138 result = {}
78139 if run_tag_filter is None :
79140 run_tag_filter = provider .RunTagFilter (runs = None , tags = None )
@@ -91,7 +152,7 @@ def list_scalars(self, experiment_id, plugin_name, run_tag_filter=None):
91152 if max_wall_time is None or max_wall_time < event .wall_time :
92153 max_wall_time = event .wall_time
93154 summary_metadata = self ._multiplexer .SummaryMetadata (run , tag )
94- result_for_run [tag ] = provider . ScalarTimeSeries (
155+ result_for_run [tag ] = construct_time_series (
95156 max_step = max_step ,
96157 max_wall_time = max_wall_time ,
97158 plugin_content = summary_metadata .plugin_data .content ,
@@ -100,28 +161,23 @@ def list_scalars(self, experiment_id, plugin_name, run_tag_filter=None):
100161 )
101162 return result
102163
103- def read_scalars (
104- self , experiment_id , plugin_name , downsample = None , run_tag_filter = None
105- ):
106- # TODO(@wchargin): Downsampling not implemented, as the multiplexer
107- # is already downsampled. We could downsample on top of the existing
108- # sampling, which would be nice for testing.
109- del downsample # ignored for now
110- index = self .list_scalars (
111- experiment_id , plugin_name , run_tag_filter = run_tag_filter
112- )
164+ def _read (self , convert_event , index ):
165+ """Helper to read scalar or tensor data from the multiplexer.
166+
167+ Args:
168+ convert_event: Takes `plugin_event_accumulator.TensorEvent` to
169+ either `provider.ScalarDatum` or `provider.TensorDatum`.
170+ index: The result of `list_scalars` or `list_tensors`.
171+
172+ Returns:
173+ A dict of dicts of values returned by `convert_event` calls,
174+ suitable to be returned from `read_scalars` or `read_tensors`.
175+ """
113176 result = {}
114177 for (run , tags_for_run ) in six .iteritems (index ):
115178 result_for_run = {}
116179 result [run ] = result_for_run
117180 for (tag , metadata ) in six .iteritems (tags_for_run ):
118181 events = self ._multiplexer .Tensors (run , tag )
119- result_for_run [tag ] = [self . _convert_scalar_event (e ) for e in events ]
182+ result_for_run [tag ] = [convert_event (e ) for e in events ]
120183 return result
121-
122- def _convert_scalar_event (self , event ):
123- return provider .ScalarDatum (
124- step = event .step ,
125- wall_time = event .wall_time ,
126- value = tensor_util .make_ndarray (event .tensor_proto ).item (),
127- )
0 commit comments