@@ -31,7 +31,7 @@ def _AddEvents(path):
  if not tf.io.gfile.isdir(path):
    tf.io.gfile.makedirs(path)
  fpath = os.path.join(path, 'hypothetical.tfevents.out')
-  with tf.compat.v1.gfile.GFile(fpath, 'w') as f:
+  with tf.io.gfile.GFile(fpath, 'w') as f:
    f.write('')
  return fpath
2 changes: 1 addition & 1 deletion tensorboard/backend/event_processing/plugin_asset_util.py
@@ -90,7 +90,7 @@ def RetrieveAsset(logdir, plugin_name, asset_name):

  asset_path = os.path.join(PluginDirectory(logdir, plugin_name), asset_name)
  try:
-    with tf.compat.v1.gfile.Open(asset_path, "r") as f:
+    with tf.io.gfile.GFile(asset_path, "r") as f:
      return f.read()
  except tf.errors.NotFoundError:
    raise KeyError("Asset path %s not found" % asset_path)
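Note: tf.io.gfile.GFile is the non-deprecated spelling of the file API that tf.compat.v1.gfile.Open and tf.compat.v1.gfile.GFile alias, so each change in this PR is a one-line rename with the same read/write semantics. A minimal sketch of the pattern being migrated, using a made-up path purely for illustration:

    import tensorflow as tf

    path = '/tmp/example.txt'  # hypothetical path, not part of this PR

    # Before: with tf.compat.v1.gfile.Open(path, 'w') as f: ...
    # After, as used throughout this PR:
    with tf.io.gfile.GFile(path, 'w') as f:
      f.write('hello world')

    with tf.io.gfile.GFile(path, 'r') as f:
      contents = f.read()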
@@ -31,7 +31,7 @@ def _AddEvents(path):
  if not tf.io.gfile.isdir(path):
    tf.io.gfile.makedirs(path)
  fpath = os.path.join(path, 'hypothetical.tfevents.out')
-  with tf.compat.v1.gfile.GFile(fpath, 'w') as f:
+  with tf.io.gfile.GFile(fpath, 'w') as f:
    f.write('')
  return fpath
8 changes: 4 additions & 4 deletions tensorboard/plugins/beholder/file_system_tools.py
@@ -28,12 +28,12 @@


def write_file(contents, path, mode='wb'):
-  with tf.compat.v1.gfile.Open(path, mode) as new_file:
+  with tf.io.gfile.GFile(path, mode) as new_file:
    new_file.write(contents)


def read_tensor_summary(path):
-  with tf.compat.v1.gfile.Open(path, 'rb') as summary_file:
+  with tf.io.gfile.GFile(path, 'rb') as summary_file:
    summary_string = summary_file.read()

  if not summary_string:
@@ -48,13 +48,13 @@ def read_tensor_summary(path):


def write_pickle(obj, path):
-  with tf.compat.v1.gfile.Open(path, 'wb') as new_file:
+  with tf.io.gfile.GFile(path, 'wb') as new_file:
    pickle.dump(obj, new_file)


def read_pickle(path, default=None):
  try:
-    with tf.compat.v1.gfile.Open(path, 'rb') as pickle_file:
+    with tf.io.gfile.GFile(path, 'rb') as pickle_file:
      result = pickle.load(pickle_file)

  except (IOError, EOFError, ValueError, tf.errors.NotFoundError) as e:
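The write_pickle/read_pickle changes above keep working after the rename because a GFile opened in binary mode behaves like an ordinary Python file object, so it can be handed straight to pickle, just as the old gfile.Open handle could. A rough sketch of that round trip, again with a made-up path:

    import pickle
    import tensorflow as tf

    path = '/tmp/state.pkl'  # hypothetical path for illustration
    obj = {'step': 3, 'values': [1.0, 2.0]}

    with tf.io.gfile.GFile(path, 'wb') as f:  # binary mode, as in write_pickle()
      pickle.dump(obj, f)

    with tf.io.gfile.GFile(path, 'rb') as f:  # binary mode, as in read_pickle()
      restored = pickle.load(f)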
4 changes: 2 additions & 2 deletions tensorboard/plugins/beholder/im_util.py
@@ -139,12 +139,12 @@ def run(self, image, height, width):
resize = Resizer()

def read_image(filename):
-  with tf.compat.v1.gfile.Open(filename, 'rb') as image_file:
+  with tf.io.gfile.GFile(filename, 'rb') as image_file:
    return np.array(decode_png(image_file.read()))


def write_image(array, filename):
-  with tf.compat.v1.gfile.Open(filename, 'w') as image_file:
+  with tf.io.gfile.GFile(filename, 'w') as image_file:
    image_file.write(encoder.encode_png(array))
2 changes: 1 addition & 1 deletion tensorboard/plugins/debugger/debugger_server_lib.py
@@ -256,7 +256,7 @@ def __init__(self,

    if tf.io.gfile.exists(self._registry_backup_file_path):
      # A backup file exists. Read its contents to use for initialization.
-      with tf.compat.v1.gfile.Open(self._registry_backup_file_path, "r") as backup_file:
+      with tf.io.gfile.GFile(self._registry_backup_file_path, "r") as backup_file:
        try:
          # Use the data to initialize the registry.
          initial_data = json.load(backup_file)
@@ -644,7 +644,7 @@ def get_label_vocab(vocab_path):
  """Returns a list of label strings loaded from the provided path."""
  if vocab_path:
    try:
-      with tf.compat.v1.gfile.GFile(vocab_path, 'r') as f:
+      with tf.io.gfile.GFile(vocab_path, 'r') as f:
        return [line.rstrip('\n') for line in f]
    except tf.errors.NotFoundError as err:
      tf.logging.error('error reading vocab file: %s', err)
2 changes: 1 addition & 1 deletion tensorboard/plugins/profile/profile_plugin.py
@@ -303,7 +303,7 @@ def data_impl(self, request):
    asset_path = os.path.join(self.plugin_logdir, rel_data_path)
    raw_data = None
    try:
-      with tf.compat.v1.gfile.Open(asset_path, 'rb') as f:
+      with tf.io.gfile.GFile(asset_path, 'rb') as f:
        raw_data = f.read()
    except tf.errors.NotFoundError:
      logger.warn('Asset path %s not found', asset_path)
2 changes: 1 addition & 1 deletion tensorboard/plugins/projector/__init__.py
@@ -58,5 +58,5 @@ def visualize_embeddings(summary_writer, config):
  # Saving the config file in the logdir.
  config_pbtxt = _text_format.MessageToString(config)
  path = os.path.join(logdir, _projector_plugin.PROJECTOR_FILENAME)
-  with tf.compat.v1.gfile.Open(path, 'w') as f:
+  with tf.io.gfile.GFile(path, 'w') as f:
    f.write(config_pbtxt)
2 changes: 1 addition & 1 deletion tensorboard/plugins/projector/projector_api_test.py
@@ -48,7 +48,7 @@ def testVisualizeEmbeddings(self):
    projector.visualize_embeddings(writer, config)

    # Read the configurations from disk and make sure it matches the original.
-    with tf.compat.v1.gfile.GFile(os.path.join(temp_dir, 'projector_config.pbtxt')) as f:
+    with tf.io.gfile.GFile(os.path.join(temp_dir, 'projector_config.pbtxt')) as f:
      config2 = projector.ProjectorConfig()
      text_format.Parse(f.read(), config2)
    self.assertEqual(config, config2)
12 changes: 6 additions & 6 deletions tensorboard/plugins/projector/projector_plugin.py
@@ -146,7 +146,7 @@ def add_column(self, column_name, column_values):


def _read_tensor_tsv_file(fpath):
-  with tf.compat.v1.gfile.GFile(fpath, 'r') as f:
+  with tf.io.gfile.GFile(fpath, 'r') as f:
    tensor = []
    for line in f:
      line = line.rstrip('\n')
@@ -170,7 +170,7 @@ def _latest_checkpoints_changed(configs, run_path_pairs):
    config = ProjectorConfig()
    config_fpath = os.path.join(assets_dir, PROJECTOR_FILENAME)
    if tf.io.gfile.exists(config_fpath):
-      with tf.compat.v1.gfile.GFile(config_fpath, 'r') as f:
+      with tf.io.gfile.GFile(config_fpath, 'r') as f:
        file_content = f.read()
        text_format.Merge(file_content, config)
    else:
@@ -381,7 +381,7 @@ def _read_latest_config_files(self, run_path_pairs):
      config = ProjectorConfig()
      config_fpath = os.path.join(assets_dir, PROJECTOR_FILENAME)
      if tf.io.gfile.exists(config_fpath):
-        with tf.compat.v1.gfile.GFile(config_fpath, 'r') as f:
+        with tf.io.gfile.GFile(config_fpath, 'r') as f:
          file_content = f.read()
          text_format.Merge(file_content, config)
      has_tensor_files = False
@@ -512,7 +512,7 @@ def _serve_metadata(self, request):
                     'text/plain', 400)

    num_header_rows = 0
-    with tf.compat.v1.gfile.GFile(fpath, 'r') as f:
+    with tf.io.gfile.GFile(fpath, 'r') as f:
      lines = []
      # Stream reading the file with early break in case the file doesn't fit in
      # memory.
@@ -608,7 +608,7 @@ def _serve_bookmarks(self, request):
                     'text/plain', 400)

    bookmarks_json = None
-    with tf.compat.v1.gfile.GFile(fpath, 'rb') as f:
+    with tf.io.gfile.GFile(fpath, 'rb') as f:
      bookmarks_json = f.read()
    return Respond(request, bookmarks_json, 'application/json')

@@ -641,7 +641,7 @@ def _serve_sprite_image(self, request):
    if not tf.io.gfile.exists(fpath) or tf.io.gfile.isdir(fpath):
      return Respond(request, '"%s" does not exist or is directory' % fpath,
                     'text/plain', 400)
-    f = tf.compat.v1.gfile.GFile(fpath, 'rb')
+    f = tf.io.gfile.GFile(fpath, 'rb')
    encoded_image_string = f.read()
    f.close()
    image_type = imghdr.what(None, encoded_image_string)
6 changes: 3 additions & 3 deletions tensorboard/plugins/projector/projector_plugin_test.py
@@ -80,7 +80,7 @@ def testRunsWithInvalidModelCheckpointPathInConfig(self):
    config.model_checkpoint_path = 'does_not_exist'
    embedding = config.embeddings.add()
    embedding.tensor_name = 'var1'
-    with tf.compat.v1.gfile.GFile(config_path, 'w') as f:
+    with tf.io.gfile.GFile(config_path, 'w') as f:
      f.write(text_format.MessageToString(config))
    self._SetupWSGIApp()

@@ -282,12 +282,12 @@ def _GenerateProjectorTestData(self):
    # Add an embedding by its canonical tensor name.
    embedding.tensor_name = 'var1:0'

-    with tf.compat.v1.gfile.GFile(os.path.join(self.log_dir, 'bookmarks.json'), 'w') as f:
+    with tf.io.gfile.GFile(os.path.join(self.log_dir, 'bookmarks.json'), 'w') as f:
      f.write('{"a": "b"}')
    embedding.bookmarks_path = 'bookmarks.json'

    config_pbtxt = text_format.MessageToString(config)
-    with tf.compat.v1.gfile.GFile(config_path, 'w') as f:
+    with tf.io.gfile.GFile(config_path, 'w') as f:
      f.write(config_pbtxt)

    # Write a checkpoint with some dummy variables.