Merged
23 commits
d84a0eb
Add flags to TBContext
jameswex Mar 19, 2018
ab0e9cf
Add flags to TBContext in application
jameswex Mar 19, 2018
e72cbcf
Add flags to tbcontext
jameswex Mar 19, 2018
3803ab7
gitignore changes
jameswex Apr 11, 2018
b65f56c
Merge branch 'master' of https://github.com/tensorflow/tensorboard
jameswex Apr 11, 2018
77a5109
Merge remote-tracking branch 'upstream/master'
jameswex Apr 13, 2018
92e5a72
Merge remote-tracking branch 'upstream/master'
jameswex Apr 18, 2018
0611086
Merge branch 'master' of https://github.com/tensorflow/tensorboard
jameswex Apr 20, 2018
2758a2a
Merge remote-tracking branch 'upstream/master'
jameswex May 9, 2018
8820f4c
Merge remote-tracking branch 'upstream/master'
jameswex May 9, 2018
d1299c6
Merge remote-tracking branch 'upstream/master'
jameswex Aug 6, 2018
c0c8258
jwexler updated rules_closure version
jameswex Aug 6, 2018
019e6f7
Merge remote-tracking branch 'upstream/master'
jameswex Sep 5, 2018
1833317
Merge remote-tracking branch 'upstream/master'
jameswex Sep 10, 2018
286df31
Merge branch 'master' of https://github.com/tensorflow/tensorboard
jameswex Sep 14, 2018
4cebc83
Merge branch 'master' of https://github.com/jameswex/tensorboard
jameswex Sep 17, 2018
d37b198
Merge remote-tracking branch 'upstream/master'
jameswex Sep 19, 2018
9335a8b
Merge remote-tracking branch 'upstream/master'
jameswex Sep 20, 2018
96fe3be
Merge remote-tracking branch 'upstream/master'
jameswex Sep 24, 2018
e2f8e08
Merge remote-tracking branch 'upstream/master'
jameswex Sep 28, 2018
e350d1a
Merge remote-tracking branch 'upstream/master'
jameswex Oct 9, 2018
6fa88ce
Add sampling to example loading
jameswex Oct 9, 2018
50018ec
fix test
jameswex Oct 9, 2018
@@ -124,14 +124,16 @@ def _examples_from_path_handler(self, request):
"""
examples_count = int(request.args.get('max_examples'))
examples_path = request.args.get('examples_path')
sampling_odds = float(request.args.get('sampling_odds'))
try:
platform_utils.throw_if_file_access_not_allowed(examples_path,
self._logdir,
self._has_auth_group)
example_strings = platform_utils.example_protos_from_path(
examples_path, examples_count, parse_examples=False)
examples_path, examples_count, parse_examples=False,
sampling_odds=sampling_odds)
self.examples = [
tf.train.Example.FromString(ex) for ex in example_strings]
tf.train.Example.FromString(ex) for ex in example_strings]
self.generate_sprite(example_strings)
json_examples = [
json_format.MessageToJson(example) for example in self.examples
@@ -404,4 +406,4 @@ def _infer_mutants_handler(self, request):
return http_util.Respond(request, json_mapping, 'application/json')
except common_utils.InvalidUserInputError as e:
return http_util.Respond(request, {'error': e.message},
'application/json', code=400)
'application/json', code=400)
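
The handler change above means callers must now pass a `sampling_odds` query parameter alongside `examples_path` and `max_examples`. A minimal client-side sketch of such a request is below; the host, port, example path, and parameter values are assumptions for illustration and are not part of this PR.

```python
# Hypothetical client-side sketch (not part of this PR): build the
# examples_from_path request URL with the new sampling_odds parameter.
# The host, port, path, and values are assumptions for illustration.
from six.moves.urllib import parse as urllib_parse

params = {
    'examples_path': '/tmp/data.tfrecord',  # assumed TFRecord path
    'max_examples': 1000,
    'sampling_odds': 0.2,  # keep each record with ~20% probability
}
url = ('http://localhost:6006/data/plugin/whatif/examples_from_path?' +
       urllib_parse.urlencode(params))
print(url)
```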
@@ -76,7 +76,8 @@ def test_examples_from_path(self):
'/data/plugin/whatif/examples_from_path?' +
urllib_parse.urlencode({
'examples_path': examples_path,
'max_examples': 2
'max_examples': 2,
'sampling_odds': 1,
}))
self.assertEqual(200, response.status_code)
example_strings = json.loads(response.get_data().decode('utf-8'))['examples']
@@ -94,7 +95,8 @@ def test_examples_from_path_if_path_does_not_exist(self):
'/data/plugin/whatif/examples_from_path?' +
urllib_parse.urlencode({
'examples_path': 'does_not_exist',
'max_examples': 2
'max_examples': 2,
'sampling_odds': 1,
}))
error = json.loads(response.get_data().decode('utf-8'))['error']
self.assertTrue(error)
@@ -67,6 +67,9 @@
.input-in-row {
margin-right: 10px;
}
.flex-grow {
flex-grow: 1;
}
.model-type-label {
padding-top: 10px;
}
@@ -105,11 +108,16 @@
<paper-input always-float-label label="Path to examples"
value="{{examplesPath}}">
</paper-input>
<paper-input always-float-label type="number"
label="Maximum number of examples to load"
placeholder="[[maxExamples]]" value="{{maxExamples}}">
</paper-input>

<div class="flex-holder">
<paper-input always-float-label type="number" class="input-in-row flex-grow"
label="Maximum number of examples to load"
placeholder="[[maxExamples]]" value="{{maxExamples}}">
</paper-input>
<paper-input always-float-label type="number" class="input-in-row flex-grow"
label="Sampling ratio (0.2 = sample ~20% of examples)"
placeholder="[[samplingOdds]]" value="{{samplingOdds}}">
</paper-input>
</div>
<paper-input always-float-label label="Path to label dictionary (optional)"
placeholder="[[labelVocabPath]]"
value="{{labelVocabPath}}"
@@ -129,7 +137,7 @@
<div class="flex-holder">
<paper-input always-float-label type="number" label="Max classes to display"
placeholder="[[maxClassesToDisplay]]" value="{{maxClassesToDisplay}}"
class="input-in-row" disabled="[[shouldDisableClassificationControls_(modelType)]]">
class="input-in-row" disabled="[[shouldDisableMultiClassControls_(multiClass)]]">
</paper-input>
<paper-checkbox disabled="[[shouldDisableClassificationControls_(modelType)]]"
checked="{{multiClass}}"
@@ -148,6 +156,8 @@
const defaultModelType = 'classification';
const defaultMaxExamples = '1000';
const defaultLabelVocabPath = '';
const defaultMaxClassesToDisplay = '5';
const defaultSamplingOdds = '1';

Polymer({
is: "tf-inference-panel",
@@ -198,7 +208,7 @@
maxExamples: {
type: Number,
value: tf_storage.getStringInitializer(
'maxExamples', {defaultValue: String(defaultMaxExamples)}),
'maxExamples', {defaultValue: defaultMaxExamples}),
observer: 'maxExamplesChanged_',
notify: true,
},
@@ -216,7 +226,16 @@
},
maxClassesToDisplay: {
type: Number,
value: 5,
value: tf_storage.getStringInitializer(
'maxClassesToDisplay', {defaultValue: defaultMaxClassesToDisplay}),
observer: 'maxClassesToDisplayChanged_',
notify: true,
},
samplingOdds: {
type: Number,
value: tf_storage.getStringInitializer(
'samplingOdds', {defaultValue: defaultSamplingOdds}),
observer: 'samplingOddsChanged_',
notify: true,
},
},
@@ -245,9 +264,19 @@
labelVocabPathChanged_: tf_storage.getStringObserver(
'labelVocabPath', {defaultValue: defaultLabelVocabPath}),

maxClassesToDisplayChanged_: tf_storage.getStringObserver(
'maxClassesToDisplay', {defaultValue: defaultMaxClassesToDisplay}),

samplingOddsChanged_: tf_storage.getStringObserver(
'samplingOdds', {defaultValue: defaultSamplingOdds}),

shouldDisableClassificationControls_: function(modelType) {
return modelType == 'regression';
},

shouldDisableMultiClassControls_: function(multiClass) {
return !multiClass;
}
});

</script>
@@ -831,6 +831,7 @@
max-examples="{{maxExamples}}"
label-vocab-path="{{labelVocabPath}}"
multi-class="{{multiClass}}"
sampling-odds="{{samplingOdds}}"
max-classes-to-display="{{maxInferenceEntriesPerRun}}">
</tf-inference-panel>
<div class="accept-button-holder">
@@ -1669,7 +1670,10 @@ <h2>Create a distance feature</h2>
// If the classification model is a multi-class model.
multiClass: {
type: Boolean,
value: false,
},
// Sampling odds (1: load all examples, .2: sample 20% of examples)
samplingOdds: {
type: Number,
},
// Precision on charts for performance measuring.
axisPrecision: {
@@ -3226,7 +3230,8 @@ <h2>Create a distance feature</h2>
getExamples_: function(){
var url = this.makeUrl_('/data/plugin/whatif/examples_from_path',
{'examples_path': this.examplesPath,
'max_examples': this.maxExamples});
'max_examples': this.maxExamples,
'sampling_odds': this.samplingOdds});

const updateExampleContents = result => {
this.updateExampleContents_(
23 changes: 13 additions & 10 deletions tensorboard/plugins/interactive_inference/utils/platform_utils.py
@@ -16,6 +16,7 @@

from glob import glob
from grpc.beta import implementations
import random
from six.moves.urllib.parse import urlparse
import tensorflow as tf

@@ -62,15 +63,17 @@ def throw_if_file_access_not_allowed(file_path, logdir, has_auth_group):
def example_protos_from_path(cns_path,
num_examples=10,
start_index=0,
parse_examples=True):
parse_examples=True,
sampling_odds=1):
"""Returns a number of tf.train.Examples from the CNS path.

Args:
cns_path: A string CNS path.
num_examples: The maximum number of examples to return from the path.
start_index: The index of the first example to return.
parse_examples: If true then parses the serialized proto from the path into
proto objects. Defaults to True.
sampling_odds: Odds of loading an example, used for sampling. When >= 1
(the default), all examples are loaded.

Returns:
A list of Example protos or serialized proto strings at the CNS path.
@@ -80,8 +83,8 @@
"""

def append_examples_from_iterable(iterable, examples):
for i, value in enumerate(iterable):
if i >= start_index:
for value in iterable:
if sampling_odds >= 1 or random.random() < sampling_odds:
examples.append(
tf.train.Example.FromString(value) if parse_examples else value)
if len(examples) >= num_examples:
@@ -90,19 +93,19 @@ def append_examples_from_iterable(iterable, examples):
filenames = filepath_to_filepath_list(cns_path)
examples = []
compression_types = [
tf.python_io.TFRecordCompressionType.NONE,
tf.python_io.TFRecordCompressionType.GZIP,
tf.python_io.TFRecordCompressionType.ZLIB,
tf.python_io.TFRecordCompressionType.NONE,
tf.python_io.TFRecordCompressionType.GZIP,
tf.python_io.TFRecordCompressionType.ZLIB,
]
current_compression_idx = 0
current_file_index = 0
while (current_file_index < len(filenames) and
current_compression_idx < len(compression_types)):
try:
record_iterator = tf.python_io.tf_record_iterator(
path=filenames[current_file_index],
options=tf.python_io.TFRecordOptions(
compression_types[current_compression_idx]))
path=filenames[current_file_index],
options=tf.python_io.TFRecordOptions(
compression_types[current_compression_idx]))
append_examples_from_iterable(record_iterator, examples)
current_file_index += 1
if len(examples) >= num_examples:
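
Taken together, `example_protos_from_path` now accepts a `sampling_odds` keyword and keeps a record whenever `sampling_odds >= 1` or `random.random() < sampling_odds`, so on average about `sampling_odds * N` of `N` candidate records survive, still capped by `num_examples`. A minimal sketch under those assumptions (the import path follows the file location shown above; the TFRecord path and the counts are made up for illustration):

```python
# Minimal usage sketch of the updated example_protos_from_path signature.
# The import path follows the file location shown above; the TFRecord path
# and the numbers are assumptions for illustration only.
import random

from tensorboard.plugins.interactive_inference.utils import platform_utils

examples = platform_utils.example_protos_from_path(
    '/tmp/data.tfrecord',   # assumed TFRecord path
    num_examples=500,       # hard cap, still enforced after sampling
    parse_examples=True,
    sampling_odds=0.2)      # keep each record with ~20% probability

# Standalone illustration of the sampling predicate added in this PR:
# out of 10,000 candidate records, roughly 2,000 pass when sampling_odds
# is 0.2, and every record passes when sampling_odds >= 1.
sampling_odds = 0.2
kept = sum(1 for _ in range(10000)
           if sampling_odds >= 1 or random.random() < sampling_odds)
print(kept)  # ~2000 on average
```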