From 8d10ad975e5f9fccdbd529bb6f9425d2a86b5f14 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Fri, 25 Jan 2019 20:32:04 -0800 Subject: [PATCH 1/7] Make tensorboard.compat.{tf,tf2} lazily loaded --- tensorboard/__init__.py | 16 ++-- tensorboard/compat/BUILD | 3 + tensorboard/compat/__init__.py | 86 +++++++++---------- .../compat/tensorflow_stub/__init__.py | 3 + tensorboard/lazy.py | 57 ++++++++---- tensorboard/plugins/audio/summary_test.py | 10 +-- tensorboard/plugins/audio/summary_v2.py | 4 +- tensorboard/plugins/histogram/summary_test.py | 14 +-- tensorboard/plugins/histogram/summary_v2.py | 11 +-- tensorboard/plugins/image/summary_test.py | 10 +-- tensorboard/plugins/image/summary_v2.py | 6 +- .../plugins/projector/projector_plugin.py | 12 ++- tensorboard/plugins/scalar/summary_test.py | 14 +-- tensorboard/plugins/scalar/summary_v2.py | 4 +- tensorboard/plugins/text/summary_test.py | 14 +-- tensorboard/plugins/text/summary_v2.py | 12 ++- 16 files changed, 145 insertions(+), 131 deletions(-) diff --git a/tensorboard/__init__.py b/tensorboard/__init__.py index 60a6071f4c..b39df4d3b4 100644 --- a/tensorboard/__init__.py +++ b/tensorboard/__init__.py @@ -19,14 +19,16 @@ from __future__ import division from __future__ import print_function +import importlib as _importlib + from tensorboard import lazy -pkg = lambda i: i # helps google sync process -mod = lambda i: lazy.LazyLoader(i[i.rindex('.') + 1:], globals(), i) # noqa: F821 -program = mod(pkg('tensorboard.program')) -summary = mod(pkg('tensorboard.summary')) +@lazy.lazy_load('tensorboard.program') +def program(): + return _importlib.import_module('tensorboard.program') + -del lazy -del mod -del pkg +@lazy.lazy_load('tensorboard.summary') +def summary(): + return _importlib.import_module('tensorboard.summary') diff --git a/tensorboard/compat/BUILD b/tensorboard/compat/BUILD index a98dd0f6fb..9a12c44bdb 100644 --- a/tensorboard/compat/BUILD +++ b/tensorboard/compat/BUILD @@ -23,6 +23,9 @@ py_library( srcs = ["__init__.py"], srcs_version = "PY2AND3", visibility = ["//visibility:public"], + deps = [ + "//tensorboard:lazy", + ], ) # This rule ensures that `from tensorboard.compat import tf` will provide a diff --git a/tensorboard/compat/__init__.py b/tensorboard/compat/__init__.py index 732578f14d..0ce4626fa0 100644 --- a/tensorboard/compat/__init__.py +++ b/tensorboard/compat/__init__.py @@ -14,59 +14,59 @@ """Compatibility interfaces for TensorBoard. -This module provides logic for importing variations on the TensorFlow APIs. - -The alias `tf` is for the main TF API used by TensorBoard. By default this will -be the result of `import tensorflow as tf`, or undefined if that fails. This -can be used in combination with //tensorboard/compat:tensorflow (to fall back to -a stub TF API implementation if the real one is not available) and -//tensorboard/compat:no_tensorflow (to use the stub TF API unconditionally). - -The function `import_tf_v2` provides common logic for importing the TF 2.0 API, -and returns the root module of the API if found, or else raises ImportError. -This is a function instead of a direct alias like `tf` in order to provide -enough indirection to get around circular dependencies. +This module provides logic for importing variations on the TensorFlow APIs, as +lazily loaded imports to help avoid circular dependency issues and defer the +search and loading of the module until necessary. 
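For example, a caller can bind `tf` and `tf2` up front without importing
TensorFlow; the real import (or the stub fallback) happens only on the first
attribute lookup. (Illustrative sketch, not part of this change; the banner
function is made up.)

    from tensorboard.compat import tf   # binds a lazy module; no TF import yet
    from tensorboard.compat import tf2  # likewise, for the TF 2.0-style API

    def version_banner():
      # The first attribute access resolves `tf` to real TensorFlow, or to
      # tensorboard.compat.tensorflow_stub when TensorFlow is unavailable;
      # `tf2` raises ImportError instead if no TF 2.0 API can be found.
      return 'TensorBoard is using TensorFlow %s' % tf.__version__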
""" from __future__ import absolute_import from __future__ import division from __future__ import print_function -# First, check if using TF is explicitly disabled by request. -USING_TF = True -try: - from tensorboard.compat import notf - USING_TF = False -except ImportError: - pass +import importlib as _importlib + +import tensorboard.lazy as _lazy -# If TF is not disabled, check if it's available. -if USING_TF: - try: - import tensorflow as tf - except ImportError: - USING_TF = False -if not USING_TF: - # If we can't use TF, try to provide the stub instead. - # This will only work if the tensorflow_stub dep is included - # in the build, via the `tensorboard/compat:tensorflow` target. +@_lazy.lazy_load('tensorboard.compat.tf') +def tf(): + """Provide the root module of a TF-like API for use within TensorBoard. + + By default this is equivalent to `import tensorflow as tf`, but it can be used + in combination with //tensorboard/compat:tensorflow (to fall back to a stub TF + API implementation if the real one is not available) or with + //tensorboard/compat:no_tensorflow (to force unconditional use of the stub). + + Returns: + The root module of a TF-like API, if available. + + Raises: + ImportError: if a TF-like API is not available. + """ try: - from tensorboard.compat import tensorflow_stub as tf + _importlib.import_module('tensorboard.compat.notf') except ImportError: - pass + try: + return _importlib.import_module('tensorflow') + except ImportError: + pass + return _importlib.import_module('tensorboard.compat.tensorflow_stub') # pylint: disable=line-too-long + + +@_lazy.lazy_load('tensorboard.compat.tf2') +def tf2(): + """Provide the root module of a TF-2.0 API for use within TensorBoard. + Returns: + The root module of a TF-2.0 API, if available. -def import_tf_v2(): - """Import the TF 2.0 API if possible, or raise an ImportError.""" - # We must be able to use TF in order to provide the TF 2.0 API. - if USING_TF: - # Check if this is TF 2.0 by looking for a known 2.0-only tf.summary symbol. - # TODO(nickfelt): determine a cleaner way to do this. - if hasattr(tf, 'summary') and hasattr(tf.summary, 'write'): - return tf - else: - # As a fallback, try `tensorflow.compat.v2` if it's defined. - if hasattr(tf, 'compat') and hasattr(tf.compat, 'v2'): - return tf.compat.v2 + Raises: + ImportError: if a TF-2.0 API is not available. + """ + # Import the `tf` compat API from this file and check if it's already TF 2.0. + if tf.__version__.startswith('2.'): + return tf + elif hasattr(tf, 'compat') and hasattr(tf.compat, 'v2'): + # As a fallback, try `tensorflow.compat.v2` if it's defined. + return tf.compat.v2 raise ImportError('cannot import tensorflow 2.0 API') diff --git a/tensorboard/compat/tensorflow_stub/__init__.py b/tensorboard/compat/tensorflow_stub/__init__.py index 1d322dbb32..685b73b838 100644 --- a/tensorboard/compat/tensorflow_stub/__init__.py +++ b/tensorboard/compat/tensorflow_stub/__init__.py @@ -34,3 +34,6 @@ from . import gfile # noqa from . import pywrap_tensorflow # noqa from . import tensor_shape # noqa + +# Set a fake __version__ to help distinguish this as our own stub API. 
+__version__ = 'stub' diff --git a/tensorboard/lazy.py b/tensorboard/lazy.py index d1099b0636..fd3105ec3a 100644 --- a/tensorboard/lazy.py +++ b/tensorboard/lazy.py @@ -19,36 +19,55 @@ from __future__ import division from __future__ import print_function -import importlib +import functools import types class LazyLoader(types.ModuleType): - """Lazily import a module, mainly to avoid pulling in large dependencies.""" + """Lazily import a module. + + This can be used to defer importing troublesome dependencies - e.g. ones that + are large and infrequently used, or that cause a dependency cycle - + until they are actually used. + """ # The lint error here is incorrect. - def __init__(self, local_name, parent_module_globals, name): # pylint: disable=super-on-old-class - self._local_name = local_name - self._parent_module_globals = parent_module_globals + def __init__(self, name, load_fn): # pylint: disable=super-on-old-class + """Create a LazyLoader for a module. + Args: + name: the fully-qualified name of the module + load_fn: callable that actually does the import and returns the module + """ super(LazyLoader, self).__init__(name) + self._load_fn = load_fn + self._cached_module = None def _load(self): - # Import the target module and insert it into the parent's namespace - module = importlib.import_module(self.__name__) - self._parent_module_globals[self._local_name] = module - - # Update this object's dict so that if someone keeps a reference to the - # LazyLoader, lookups are efficient (__getattr__ is only called on lookups - # that fail). - self.__dict__.update(module.__dict__) - - return module + self._cached_module = self._load_fn() + # Update this object's dict to make future lookups efficient (__getattr__ is + # only called on lookups that fail). + self.__dict__.update(self._cached_module.__dict__) def __getattr__(self, item): - module = self._load() - return getattr(module, item) + if not self._cached_module: + self._load() + return getattr(self._cached_module, item) def __dir__(self): - module = self._load() - return dir(module) + if not self._cached_module: + self._load() + return dir(self._cached_module) + + +def lazy_load(name): + """Decorator to define a function that lazily loads the module 'name'. + + Args: + name: the fully-qualified name of the module; typically the last segment + of 'name' matches the name of the decorated function + + Returns: + Decorator function for lazily loading the module 'name'. + """ + return functools.partial(LazyLoader, name) diff --git a/tensorboard/plugins/audio/summary_test.py b/tensorboard/plugins/audio/summary_test.py index 688a35bd06..3279ec267d 100644 --- a/tensorboard/plugins/audio/summary_test.py +++ b/tensorboard/plugins/audio/summary_test.py @@ -29,16 +29,16 @@ # TODO(nickfelt): get encode_wav() exported in the public API. 
from tensorflow.python.ops import gen_audio_ops +from tensorboard.compat import tf2 from tensorboard.plugins.audio import metadata from tensorboard.plugins.audio import summary from tensorboard.util import tensor_util try: - from tensorboard import compat - tf_v2 = compat.import_tf_v2() + tf2.__version__ # Force lazy import to resolve except ImportError: - tf_v2 = None + tf2 = None try: tf.compat.v1.enable_eager_execution() @@ -198,12 +198,12 @@ def test_requires_nonnegative_max_outputs(self): class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase): def setUp(self): super(SummaryV2OpTest, self).setUp() - if tf_v2 is None: + if tf2 is None: self.skipTest('TF v2 summary API not available') def audio(self, *args, **kwargs): kwargs.setdefault('step', 1) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): summary.audio(*args, **kwargs) writer.close() diff --git a/tensorboard/plugins/audio/summary_v2.py b/tensorboard/plugins/audio/summary_v2.py index 9e9d503e42..8eee4bfb33 100644 --- a/tensorboard/plugins/audio/summary_v2.py +++ b/tensorboard/plugins/audio/summary_v2.py @@ -27,6 +27,7 @@ import functools +from tensorboard.compat import tf2 as tf from tensorboard.plugins.audio import metadata @@ -64,9 +65,6 @@ def audio(name, True on success, or false if no summary was emitted because no default summary writer was available. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard import compat - tf = compat.import_tf_v2() # TODO(nickfelt): get encode_wav() exported in the public API. from tensorflow.python.ops import gen_audio_ops diff --git a/tensorboard/plugins/histogram/summary_test.py b/tensorboard/plugins/histogram/summary_test.py index 503275c12f..988b7614e7 100644 --- a/tensorboard/plugins/histogram/summary_test.py +++ b/tensorboard/plugins/histogram/summary_test.py @@ -25,16 +25,16 @@ import numpy as np import tensorflow as tf +from tensorboard.compat import tf2 from tensorboard.plugins.histogram import metadata from tensorboard.plugins.histogram import summary from tensorboard.util import tensor_util try: - from tensorboard import compat - tf_v2 = compat.import_tf_v2() + tf2.__version__ # Force lazy import to resolve except ImportError: - tf_v2 = None + tf2 = None try: tf.compat.v1.enable_eager_execution() @@ -160,12 +160,12 @@ def histogram(self, *args, **kwargs): class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase): def setUp(self): super(SummaryV2OpTest, self).setUp() - if tf_v2 is None: + if tf2 is None: self.skipTest('v2 summary API not available') def histogram(self, *args, **kwargs): kwargs.setdefault('step', 1) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): summary.histogram(*args, **kwargs) writer.close() @@ -194,12 +194,12 @@ def histogram(self, *args, **kwargs): # Hack to extract current scope since there's no direct API for it. with tf.name_scope('_') as temp_scope: scope = temp_scope.rstrip('/_') - @tf_v2.function + @tf2.function def graph_fn(): # Recreate the active scope inside the defun since it won't propagate. 
with tf.name_scope(scope): summary.histogram(*args, **kwargs) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): graph_fn() writer.close() diff --git a/tensorboard/plugins/histogram/summary_v2.py b/tensorboard/plugins/histogram/summary_v2.py index e27731c3ac..9ed0e1ebae 100644 --- a/tensorboard/plugins/histogram/summary_v2.py +++ b/tensorboard/plugins/histogram/summary_v2.py @@ -31,6 +31,7 @@ import numpy as np +from tensorboard.compat import tf2 as tf from tensorboard.compat.proto import summary_pb2 from tensorboard.plugins.histogram import metadata from tensorboard.util import tensor_util @@ -59,9 +60,6 @@ def histogram(name, data, step, buckets=None, description=None): True on success, or false if no summary was emitted because no default summary writer was available. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard import compat - tf = compat.import_tf_v2() summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) with tf.summary.summary_scope( @@ -82,9 +80,6 @@ def _buckets(data, bucket_count=None): a triple `[left_edge, right_edge, count]` for a single bucket. The value of `k` is either `bucket_count` or `1` or `0`. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard import compat - tf = compat.import_tf_v2() if bucket_count is None: bucket_count = DEFAULT_BUCKET_COUNT with tf.name_scope('buckets', values=[data, bucket_count]): @@ -152,8 +147,6 @@ def histogram_pb(tag, data, buckets=None, description=None): Returns: A `summary_pb2.Summary` protobuf object. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. 
- from tensorboard.compat import tf bucket_count = DEFAULT_BUCKET_COUNT if buckets is None else buckets data = np.array(data).flatten().astype(float) if data.size == 0: @@ -179,7 +172,7 @@ def histogram_pb(tag, data, buckets=None, description=None): left_edges = edges[:-1] right_edges = edges[1:] buckets = np.array([left_edges, right_edges, bucket_counts]).transpose() - tensor = tensor_util.make_tensor_proto(buckets, dtype=tf.float64) + tensor = tensor_util.make_tensor_proto(buckets, dtype=np.float64) summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) diff --git a/tensorboard/plugins/image/summary_test.py b/tensorboard/plugins/image/summary_test.py index 5c5d975244..6440e69c87 100644 --- a/tensorboard/plugins/image/summary_test.py +++ b/tensorboard/plugins/image/summary_test.py @@ -26,14 +26,14 @@ import six import tensorflow as tf +from tensorboard.compat import tf2 from tensorboard.plugins.image import metadata from tensorboard.plugins.image import summary try: - from tensorboard import compat - tf_v2 = compat.import_tf_v2() + tf2.__version__ # Force lazy import to resolve except ImportError: - tf_v2 = None + tf2 = None try: tf.compat.v1.enable_eager_execution() @@ -181,12 +181,12 @@ def test_image_count_zero(self): class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase): def setUp(self): super(SummaryV2OpTest, self).setUp() - if tf_v2 is None: + if tf2 is None: self.skipTest('TF v2 summary API not available') def image(self, *args, **kwargs): kwargs.setdefault('step', 1) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): summary.image(*args, **kwargs) writer.close() diff --git a/tensorboard/plugins/image/summary_v2.py b/tensorboard/plugins/image/summary_v2.py index 7074142da6..8c9c36e903 100644 --- a/tensorboard/plugins/image/summary_v2.py +++ b/tensorboard/plugins/image/summary_v2.py @@ -22,9 +22,8 @@ from __future__ import division from __future__ import print_function -from tensorboard.compat.proto import summary_pb2 +from tensorboard.compat import tf2 as tf from tensorboard.plugins.image import metadata -from tensorboard.util import tensor_util def image(name, @@ -55,9 +54,6 @@ def image(name, True on success, or false if no summary was emitted because no default summary writer was available. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. 
- from tensorboard import compat - tf = compat.import_tf_v2() summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) with tf.summary.summary_scope( diff --git a/tensorboard/plugins/projector/projector_plugin.py b/tensorboard/plugins/projector/projector_plugin.py index 1d25822e31..03c641d5a8 100644 --- a/tensorboard/plugins/projector/projector_plugin.py +++ b/tensorboard/plugins/projector/projector_plugin.py @@ -32,7 +32,6 @@ from tensorboard.backend.http_util import Respond from tensorboard.compat import tf -from tensorboard.compat import USING_TF from tensorboard.plugins import base_plugin from tensorboard.plugins.projector.projector_config_pb2 import ProjectorConfig from tensorboard.util import tb_logging @@ -215,6 +214,11 @@ def _rel_to_abs_asset_path(fpath, config_fpath): return fpath +def _using_tf(): + """Return true if we're not using the fake TF API stub implementation.""" + return tf.__version__ != 'stub' + + class ProjectorPlugin(base_plugin.TBPlugin): """Embedding projector.""" @@ -402,7 +406,7 @@ def _read_latest_config_files(self, run_path_pairs): config.model_checkpoint_path = ckpt_path # Sanity check for the checkpoint file. - if (config.model_checkpoint_path and USING_TF and + if (config.model_checkpoint_path and _using_tf() and not tf.compat.v1.train.checkpoint_exists(config.model_checkpoint_path)): logger.warn('Checkpoint file "%s" not found', config.model_checkpoint_path) @@ -417,7 +421,7 @@ def _get_reader_for_run(self, run): config = self._configs[run] reader = None - if config.model_checkpoint_path and USING_TF: + if config.model_checkpoint_path and _using_tf(): try: reader = tf.compat.v1.pywrap_tensorflow.NewCheckpointReader( config.model_checkpoint_path) @@ -650,7 +654,7 @@ def _serve_sprite_image(self, request): def _find_latest_checkpoint(dir_path): - if not USING_TF: + if not _using_tf(): return None try: ckpt_path = tf.train.latest_checkpoint(dir_path) diff --git a/tensorboard/plugins/scalar/summary_test.py b/tensorboard/plugins/scalar/summary_test.py index 9c28eb81e0..ea70359e82 100644 --- a/tensorboard/plugins/scalar/summary_test.py +++ b/tensorboard/plugins/scalar/summary_test.py @@ -27,15 +27,15 @@ import six import tensorflow as tf -from tensorboard import compat +from tensorboard.compat import tf2 from tensorboard.plugins.scalar import metadata from tensorboard.plugins.scalar import summary from tensorboard.util import tensor_util try: - tf_v2 = compat.import_tf_v2() + tf2.__version__ # Force lazy import to resolve except ImportError: - tf_v2 = None + tf2 = None try: tf.compat.v1.enable_eager_execution() @@ -138,12 +138,12 @@ class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase): def setUp(self): super(SummaryV2OpTest, self).setUp() - if tf_v2 is None: + if tf2 is None: self.skipTest('v2 summary API not available') def scalar(self, *args, **kwargs): kwargs.setdefault('step', 1) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): summary.scalar(*args, **kwargs) writer.close() @@ -172,12 +172,12 @@ def scalar(self, *args, **kwargs): # Hack to extract current scope since there's no direct API for it. with tf.name_scope('_') as temp_scope: scope = temp_scope.rstrip('/_') - @tf_v2.function + @tf2.function def graph_fn(): # Recreate the active scope inside the defun since it won't propagate. 
with tf.name_scope(scope): summary.scalar(*args, **kwargs) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): graph_fn() writer.close() diff --git a/tensorboard/plugins/scalar/summary_v2.py b/tensorboard/plugins/scalar/summary_v2.py index bf15a176cf..bb39e7c884 100644 --- a/tensorboard/plugins/scalar/summary_v2.py +++ b/tensorboard/plugins/scalar/summary_v2.py @@ -23,6 +23,7 @@ import numpy as np +from tensorboard.compat import tf2 as tf from tensorboard.compat.proto import summary_pb2 from tensorboard.plugins.scalar import metadata from tensorboard.util import tensor_util @@ -43,9 +44,6 @@ def scalar(name, data, step, description=None): True on success, or false if no summary was written because no default summary writer was available. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard import compat - tf = compat.import_tf_v2() summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) with tf.summary.summary_scope( diff --git a/tensorboard/plugins/text/summary_test.py b/tensorboard/plugins/text/summary_test.py index 4b352a51f7..fc106315fd 100644 --- a/tensorboard/plugins/text/summary_test.py +++ b/tensorboard/plugins/text/summary_test.py @@ -27,15 +27,15 @@ import six import tensorflow as tf -from tensorboard import compat +from tensorboard.compat import tf2 from tensorboard.plugins.text import metadata from tensorboard.plugins.text import summary from tensorboard.util import tensor_util try: - tf_v2 = compat.import_tf_v2() + tf2.__version__ # Force lazy import to resolve except ImportError: - tf_v2 = None + tf2 = None try: tf.compat.v1.enable_eager_execution() @@ -155,12 +155,12 @@ def text(self, *args, **kwargs): class SummaryV2OpTest(SummaryBaseTest, tf.test.TestCase): def setUp(self): super(SummaryV2OpTest, self).setUp() - if tf_v2 is None: + if tf2 is None: self.skipTest('TF v2 summary API not available') def text(self, *args, **kwargs): kwargs.setdefault('step', 1) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): summary.text(*args, **kwargs) writer.close() @@ -189,12 +189,12 @@ def text(self, *args, **kwargs): # Hack to extract current scope since there's no direct API for it. with tf.name_scope('_') as temp_scope: scope = temp_scope.rstrip('/_') - @tf_v2.function + @tf2.function def graph_fn(): # Recreate the active scope inside the defun since it won't propagate. with tf.name_scope(scope): summary.text(*args, **kwargs) - writer = tf_v2.summary.create_file_writer(self.get_temp_dir()) + writer = tf2.summary.create_file_writer(self.get_temp_dir()) with writer.as_default(): graph_fn() writer.close() diff --git a/tensorboard/plugins/text/summary_v2.py b/tensorboard/plugins/text/summary_v2.py index 85e684375e..8edd8bf249 100644 --- a/tensorboard/plugins/text/summary_v2.py +++ b/tensorboard/plugins/text/summary_v2.py @@ -18,6 +18,9 @@ from __future__ import division from __future__ import print_function +import numpy as np + +from tensorboard.compat import tf2 as tf from tensorboard.compat.proto import summary_pb2 from tensorboard.plugins.text import metadata from tensorboard.util import tensor_util @@ -38,9 +41,6 @@ def text(name, data, step, description=None): True on success, or false if no summary was emitted because no default summary writer was available. 
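  For example (an illustrative sketch, not part of this change; the log
  directory and values are made up), the op only reports success while a
  default writer is active:

      from tensorboard.compat import tf2 as tf
      from tensorboard.plugins.text import summary_v2

      writer = tf.summary.create_file_writer('/tmp/text_demo')
      with writer.as_default():
        wrote = summary_v2.text('greeting', tf.constant('hello'), step=0)
      # `wrote` is truthy here; without `writer.as_default()` it would be False.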
""" - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard import compat - tf = compat.import_tf_v2() summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) with tf.summary.summary_scope( @@ -66,12 +66,10 @@ def text_pb(tag, data, description=None): Returns: A `tf.Summary` protobuf object. """ - # TODO(nickfelt): remove on-demand imports once dep situation is fixed. - from tensorboard.compat import tf try: - tensor = tensor_util.make_tensor_proto(data, dtype=tf.string) + tensor = tensor_util.make_tensor_proto(data, dtype=np.object) except TypeError as e: - raise TypeError("tensor must be of type string", e) + raise TypeError('tensor must be of type string', e) summary_metadata = metadata.create_summary_metadata( display_name=None, description=description) summary = summary_pb2.Summary() From 976064d2c782a62ec37baca1a19a6e7dfaa070a4 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Fri, 25 Jan 2019 21:11:08 -0800 Subject: [PATCH 2/7] add summary_dep_test.py to check for no TF dep from tb.summary.v2 --- tensorboard/summary/BUILD | 10 +++++++ tensorboard/summary/summary_dep_test.py | 38 +++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 tensorboard/summary/summary_dep_test.py diff --git a/tensorboard/summary/BUILD b/tensorboard/summary/BUILD index e4630576bb..9829bd5678 100644 --- a/tensorboard/summary/BUILD +++ b/tensorboard/summary/BUILD @@ -62,3 +62,13 @@ py_test( "//tensorboard:expect_tensorflow_installed", ], ) + +py_test( + name = "summary_dep_test", + size = "small", + srcs = ["summary_dep_test.py"], + srcs_version = "PY2AND3", + deps = [ + ":summary_v2", + ], +) diff --git a/tensorboard/summary/summary_dep_test.py b/tensorboard/summary/summary_dep_test.py new file mode 100644 index 0000000000..0a0e7318d6 --- /dev/null +++ b/tensorboard/summary/summary_dep_test.py @@ -0,0 +1,38 @@ +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Dependency tests for the `tensorboard.summary` APIs. + +This test is isolated in its own file to avoid depending on TensorFlow (either +directly or transitively), since we need to test the *absence* of a TF dep. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +import unittest + + +class SummaryV2DepTest(unittest.TestCase): + + def test_summary_v2_has_no_immediate_tf_dep(self): + import tensorboard.summary.v2 + from tensorboard.summary import v2 + self.assertEqual('notfound', sys.modules.get('tensorflow', 'notfound')) + + +if __name__ == '__main__': + unittest.main() From 6f9843f85efea46e96ef636aa529495685567695 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Mon, 28 Jan 2019 16:38:30 -0800 Subject: [PATCH 3/7] CR: rewrite lazy_load() to use closure and guaranteed single initialization --- tensorboard/lazy.py | 80 +++++++++++++++++++++++++-------------------- 1 file changed, 44 insertions(+), 36 deletions(-) diff --git a/tensorboard/lazy.py b/tensorboard/lazy.py index fd3105ec3a..3dc6ff29b8 100644 --- a/tensorboard/lazy.py +++ b/tensorboard/lazy.py @@ -20,54 +20,62 @@ from __future__ import print_function import functools +import threading import types -class LazyLoader(types.ModuleType): - """Lazily import a module. +def lazy_load(name): + """Decorator to define a function that lazily loads the module 'name'. This can be used to defer importing troublesome dependencies - e.g. ones that are large and infrequently used, or that cause a dependency cycle - until they are actually used. - """ - - # The lint error here is incorrect. - def __init__(self, name, load_fn): # pylint: disable=super-on-old-class - """Create a LazyLoader for a module. - Args: - name: the fully-qualified name of the module - load_fn: callable that actually does the import and returns the module - """ - super(LazyLoader, self).__init__(name) - self._load_fn = load_fn - self._cached_module = None + Args: + name: the fully-qualified name of the module; typically the last segment + of 'name' matches the name of the decorated function - def _load(self): - self._cached_module = self._load_fn() - # Update this object's dict to make future lookups efficient (__getattr__ is - # only called on lookups that fail). - self.__dict__.update(self._cached_module.__dict__) + Returns: + Decorator function that produces a lazy-loading module 'name' backed by the + underlying decorated function. + """ + def wrapper(load_fn): + # Wrap load_fn to call it exactly once and update __dict__ afterwards to + # make future lookups efficient (only failed lookups call __getattr__). + @_return_once + def load_once(self): + module = load_fn() + self.__dict__.update(module.__dict__) + return module - def __getattr__(self, item): - if not self._cached_module: - self._load() - return getattr(self._cached_module, item) + # Define a module that proxies getattr() and dir() to the result of calling + # load_once() the first time it's needed. The class is nested so we can close + # over load_once() and avoid polluting the module's attrs with our own state. + class LazyModule(types.ModuleType): + def __getattr__(self, attr_name): + return getattr(load_once(self), attr_name) - def __dir__(self): - if not self._cached_module: - self._load() - return dir(self._cached_module) + def __dir__(self): + return dir(load_once(self)) + def __repr__(self): + return '' % self.__name__ -def lazy_load(name): - """Decorator to define a function that lazily loads the module 'name'. 
+ return LazyModule(name) + return wrapper - Args: - name: the fully-qualified name of the module; typically the last segment - of 'name' matches the name of the decorated function - Returns: - Decorator function for lazily loading the module 'name'. - """ - return functools.partial(LazyLoader, name) +def _return_once(f): + """Decorator that calls f() once, then returns that value repeatedly.""" + not_called = object() # Unique "not yet called" sentinel object. + # Cache result indirectly via a list since closures can't reassign variables. + cache = [not_called] + lock = threading.Lock() + @functools.wraps(f) + def wrapper(*args, **kwargs): + if cache[0] == not_called: + with lock: + if cache[0] == not_called: + cache[0] = f(*args, **kwargs) + return cache[0] + return wrapper From 117c6f2b8ba212a2374e43aa22d25f70165441a1 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Mon, 28 Jan 2019 16:38:45 -0800 Subject: [PATCH 4/7] add a couple extra checks to SummaryV2DepTest --- tensorboard/summary/summary_dep_test.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tensorboard/summary/summary_dep_test.py b/tensorboard/summary/summary_dep_test.py index 0a0e7318d6..4f9f21d1fe 100644 --- a/tensorboard/summary/summary_dep_test.py +++ b/tensorboard/summary/summary_dep_test.py @@ -29,8 +29,12 @@ class SummaryV2DepTest(unittest.TestCase): def test_summary_v2_has_no_immediate_tf_dep(self): + # Check that we can import the module (multiple ways) and list and reference + # symbols from it without triggering a tensorflow import. import tensorboard.summary.v2 from tensorboard.summary import v2 + print(dir(tensorboard.summary.v2)) + print(tensorboard.summary.v2.scalar) self.assertEqual('notfound', sys.modules.get('tensorflow', 'notfound')) From 5aec2b521a4d4601f3218d1bdaa005c7ea0df7f3 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Mon, 28 Jan 2019 16:41:29 -0800 Subject: [PATCH 5/7] CR: avoid leaking _importlib symbol --- tensorboard/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tensorboard/__init__.py b/tensorboard/__init__.py index b39df4d3b4..471b1f0337 100644 --- a/tensorboard/__init__.py +++ b/tensorboard/__init__.py @@ -19,16 +19,16 @@ from __future__ import division from __future__ import print_function -import importlib as _importlib - from tensorboard import lazy @lazy.lazy_load('tensorboard.program') def program(): - return _importlib.import_module('tensorboard.program') + import tensorboard.program as module # pylint: disable=g-import-not-at-top + return module @lazy.lazy_load('tensorboard.summary') def summary(): - return _importlib.import_module('tensorboard.summary') + import tensorboard.summary as module # pylint: disable=g-import-not-at-top + return module From 3424a9505ce501eab035f7ee785ed372bc24b0f5 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Mon, 28 Jan 2019 18:06:53 -0800 Subject: [PATCH 6/7] CR: clarify memoization contract, avoid deadlock, nicer repr --- tensorboard/lazy.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/tensorboard/lazy.py b/tensorboard/lazy.py index 3dc6ff29b8..59c577ed2a 100644 --- a/tensorboard/lazy.py +++ b/tensorboard/lazy.py @@ -42,10 +42,11 @@ def lazy_load(name): def wrapper(load_fn): # Wrap load_fn to call it exactly once and update __dict__ afterwards to # make future lookups efficient (only failed lookups call __getattr__). 
- @_return_once + @_memoize def load_once(self): module = load_fn() self.__dict__.update(module.__dict__) + load_once.loaded = True return module # Define a module that proxies getattr() and dir() to the result of calling @@ -59,23 +60,26 @@ def __dir__(self): return dir(load_once(self)) def __repr__(self): + if hasattr(load_once, 'loaded'): + return repr(load_once(self)) return '' % self.__name__ return LazyModule(name) return wrapper -def _return_once(f): - """Decorator that calls f() once, then returns that value repeatedly.""" - not_called = object() # Unique "not yet called" sentinel object. - # Cache result indirectly via a list since closures can't reassign variables. - cache = [not_called] - lock = threading.Lock() +def _memoize(f): + """Memoizing decorator for f, which must have exactly 1 hashable argument.""" + nothing = object() # Unique "no value" sentinel object. + cache = {} + # Use a reentrank lock so that if f references the resulting wrapper we die + # with recursion depth exceeded instead of deadlocking. + lock = threading.RLock() @functools.wraps(f) - def wrapper(*args, **kwargs): - if cache[0] == not_called: + def wrapper(arg): + if cache.get(arg, nothing) == nothing: with lock: - if cache[0] == not_called: - cache[0] = f(*args, **kwargs) - return cache[0] + if cache.get(arg, nothing) == nothing: + cache[arg] = f(arg) + return cache[arg] return wrapper From 22b42398861c7be3e1a9043bd777de71e2421496 Mon Sep 17 00:00:00 2001 From: Nick Felt Date: Mon, 28 Jan 2019 18:27:37 -0800 Subject: [PATCH 7/7] CR: no moar hasattr, typo fix --- tensorboard/lazy.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tensorboard/lazy.py b/tensorboard/lazy.py index 59c577ed2a..de112b113f 100644 --- a/tensorboard/lazy.py +++ b/tensorboard/lazy.py @@ -48,6 +48,7 @@ def load_once(self): self.__dict__.update(module.__dict__) load_once.loaded = True return module + load_once.loaded = False # Define a module that proxies getattr() and dir() to the result of calling # load_once() the first time it's needed. The class is nested so we can close @@ -60,7 +61,7 @@ def __dir__(self): return dir(load_once(self)) def __repr__(self): - if hasattr(load_once, 'loaded'): + if load_once.loaded: return repr(load_once(self)) return '' % self.__name__ @@ -72,7 +73,7 @@ def _memoize(f): """Memoizing decorator for f, which must have exactly 1 hashable argument.""" nothing = object() # Unique "no value" sentinel object. cache = {} - # Use a reentrank lock so that if f references the resulting wrapper we die + # Use a reentrant lock so that if f references the resulting wrapper we die # with recursion depth exceeded instead of deadlocking. lock = threading.RLock() @functools.wraps(f)
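
Taken together, these commits leave `tensorboard.lazy` with a small
decorator-based API. A minimal usage sketch (the dotted name and the `json`
stand-in below are hypothetical, not taken from the patches):

    import tensorboard.lazy as lazy

    @lazy.lazy_load('tensorboard.example_lazy_json')  # hypothetical module name
    def example_lazy_json():
      # Runs at most once: _memoize() guards the call with a reentrant lock,
      # and load_once() copies the loaded module's __dict__ onto the proxy.
      import json  # stand-in for an expensive or cycle-prone import
      return json

    # No load has happened yet; the first attribute lookup below triggers it.
    assert example_lazy_json.dumps({'lazy': True}) == '{"lazy": true}'

Note that the decorated name ends up bound to a module object rather than a
function, so downstream code uses it exactly as if it had been imported
normally.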