diff --git a/tests/tensorflow/test_keras_to_estimator.py b/tests/tensorflow/test_keras_to_estimator.py
index 0122b34b9..d93754b17 100644
--- a/tests/tensorflow/test_keras_to_estimator.py
+++ b/tests/tensorflow/test_keras_to_estimator.py
@@ -2,6 +2,7 @@
 import tensorflow as tf
 import tensorflow_datasets as tfds
 from tests.constants import TEST_DATASET_S3_PATH
+from tests.utils import use_s3_datasets
 
 # First Party
 from smdebug.tensorflow import EstimatorHook, modes
@@ -18,7 +19,8 @@ def test_keras_to_estimator(out_dir):
 
     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load("iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True)
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset
diff --git a/tests/tensorflow2/test_keras.py b/tests/tensorflow2/test_keras.py
index d7fc42759..8b0ef1879 100644
--- a/tests/tensorflow2/test_keras.py
+++ b/tests/tensorflow2/test_keras.py
@@ -16,6 +16,7 @@
 from tests.constants import TEST_DATASET_S3_PATH
 from tests.tensorflow2.utils import is_tf_2_2
 from tests.tensorflow.utils import create_trial_fast_refresh
+from tests.utils import use_s3_datasets
 
 # First Party
 import smdebug.tensorflow as smd
@@ -750,7 +751,8 @@ def test_keras_to_estimator(out_dir, tf_eager_mode):
 
     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load("iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True)
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset
diff --git a/tests/utils.py b/tests/utils.py
index ab1ea65e2..d5db2a8ba 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -3,6 +3,10 @@
 import shutil
 from pathlib import Path
 
+# Third Party
+import boto3
+from tests.constants import TEST_DATASET_S3_PATH
+
 # First Party
 from smdebug.core.config_constants import (
     CONFIG_FILE_PATH_ENV_STR,
@@ -10,7 +14,17 @@
     DEFAULT_SAGEMAKER_TENSORBOARD_PATH,
     TENSORBOARD_CONFIG_FILE_PATH_ENV_STR,
 )
-from smdebug.core.utils import remove_file_if_exists
+from smdebug.core.utils import is_s3, remove_file_if_exists
+
+
+def use_s3_datasets():
+    s3 = boto3.resource("s3")
+    _, bucket, _ = is_s3(TEST_DATASET_S3_PATH)
+    try:
+        s3.meta.client.head_bucket(Bucket=bucket)
+        return True
+    except Exception:
+        return False
 
 
 class SagemakerSimulator(object):
diff --git a/tests/zero_code_change/test_tensorflow_integration.py b/tests/zero_code_change/test_tensorflow_integration.py
index 95a1df5f9..d93184a8e 100644
--- a/tests/zero_code_change/test_tensorflow_integration.py
+++ b/tests/zero_code_change/test_tensorflow_integration.py
@@ -24,6 +24,7 @@
 from tests.constants import TEST_DATASET_S3_PATH
 from tests.tensorflow.hooks.test_mirrored_strategy import test_basic
 from tests.tensorflow.keras.test_keras_mirrored import test_tf_keras
+from tests.utils import use_s3_datasets
 from tests.zero_code_change.tf_utils import (
     get_data,
     get_estimator,
@@ -422,9 +423,8 @@ def test_keras_to_estimator(script_mode):
 
     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load(
-            "iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True
-        )
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset