Merged
tests/tensorflow/test_keras_to_estimator.py (4 changes: 3 additions & 1 deletion)
@@ -2,6 +2,7 @@
 import tensorflow as tf
 import tensorflow_datasets as tfds
 from tests.constants import TEST_DATASET_S3_PATH
+from tests.utils import use_s3_datasets

 # First Party
 from smdebug.tensorflow import EstimatorHook, modes
@@ -18,7 +19,8 @@ def test_keras_to_estimator(out_dir):

     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load("iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True)
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset
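Note: the same guard is applied in the two test files below. Its effect, sketched here for clarity (the load_iris helper and the bucket path are illustrative, not part of the PR): when the S3 bucket is unreachable, data_dir falls back to None and TFDS downloads the dataset into its default local cache (typically ~/tensorflow_datasets), so the test still runs without S3 access.

import tensorflow_datasets as tfds

def load_iris(data_dir=None):
    # data_dir=None lets TFDS download and cache "iris" locally instead
    # of reading a pre-staged copy from S3.
    return tfds.load("iris", data_dir=data_dir, split=tfds.Split.TRAIN, as_supervised=True)

dataset = load_iris()                                    # local TFDS cache
# dataset = load_iris("s3://some-test-bucket/datasets")  # pre-staged copy (illustrative path)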
tests/tensorflow2/test_keras.py (4 changes: 3 additions & 1 deletion)
@@ -16,6 +16,7 @@
 from tests.constants import TEST_DATASET_S3_PATH
 from tests.tensorflow2.utils import is_tf_2_2
 from tests.tensorflow.utils import create_trial_fast_refresh
+from tests.utils import use_s3_datasets

 # First Party
 import smdebug.tensorflow as smd
@@ -750,7 +751,8 @@ def test_keras_to_estimator(out_dir, tf_eager_mode):

     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load("iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True)
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset
tests/utils.py (16 changes: 15 additions & 1 deletion)
@@ -3,14 +3,28 @@
 import shutil
 from pathlib import Path

+# Third Party
+import boto3
+from tests.constants import TEST_DATASET_S3_PATH
+
 # First Party
 from smdebug.core.config_constants import (
     CONFIG_FILE_PATH_ENV_STR,
     DEFAULT_SAGEMAKER_OUTDIR,
     DEFAULT_SAGEMAKER_TENSORBOARD_PATH,
     TENSORBOARD_CONFIG_FILE_PATH_ENV_STR,
 )
-from smdebug.core.utils import remove_file_if_exists
+from smdebug.core.utils import is_s3, remove_file_if_exists
+
+
+def use_s3_datasets():
+    s3 = boto3.resource("s3")
+    _, bucket, _ = is_s3(TEST_DATASET_S3_PATH)
+    try:
+        s3.meta.client.head_bucket(Bucket=bucket)
+        return True
+    except Exception:
+        return False


 class SagemakerSimulator(object):
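Note: head_bucket is a cheap HEAD-request probe; it returns normally when the bucket exists and is readable, and raises otherwise. The broad `except Exception` above also covers missing AWS credentials. A stricter variant might look like the sketch below (hypothetical helper name, not the PR's code):

import boto3
from botocore.exceptions import ClientError, NoCredentialsError

def bucket_is_reachable(bucket: str) -> bool:
    # HEAD Bucket returns 200 when the bucket exists and is readable;
    # otherwise boto3 raises ClientError (403 forbidden / 404 missing).
    try:
        boto3.client("s3").head_bucket(Bucket=bucket)
        return True
    except (ClientError, NoCredentialsError):
        return False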
tests/zero_code_change/test_tensorflow_integration.py (6 changes: 3 additions & 3 deletions)
@@ -24,6 +24,7 @@
 from tests.constants import TEST_DATASET_S3_PATH
 from tests.tensorflow.hooks.test_mirrored_strategy import test_basic
 from tests.tensorflow.keras.test_keras_mirrored import test_tf_keras
+from tests.utils import use_s3_datasets
 from tests.zero_code_change.tf_utils import (
     get_data,
     get_estimator,
@@ -422,9 +423,8 @@ def test_keras_to_estimator(script_mode):

     def input_fn():
         split = tfds.Split.TRAIN
-        dataset = tfds.load(
-            "iris", data_dir=TEST_DATASET_S3_PATH, split=split, as_supervised=True
-        )
+        data_dir = TEST_DATASET_S3_PATH if use_s3_datasets() else None
+        dataset = tfds.load("iris", data_dir=data_dir, split=split, as_supervised=True)
         dataset = dataset.map(lambda features, labels: ({"dense_input": features}, labels))
         dataset = dataset.batch(32).repeat()
         return dataset
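Note: use_s3_datasets (tests/utils.py above) unpacks a 3-tuple from smdebug's is_s3 helper. Only the (flag, bucket, key) shape is inferred from the call site `_, bucket, _ = is_s3(...)`; the sketch below shows how such a helper might split an S3 URL and is illustrative, not smdebug's actual implementation.

def parse_s3_url(path: str):
    # "s3://my-bucket/datasets/iris" -> (True, "my-bucket", "datasets/iris")
    if not path.startswith("s3://"):
        return False, None, None
    bucket, _, key = path[len("s3://"):].partition("/")
    return True, bucket, key

assert parse_s3_url("s3://my-bucket/datasets/iris") == (True, "my-bucket", "datasets/iris")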