diff --git a/resource-managers/kubernetes/integration-tests/README.md b/resource-managers/kubernetes/integration-tests/README.md
index 1daa8ce4786d..2c759d9095ce 100644
--- a/resource-managers/kubernetes/integration-tests/README.md
+++ b/resource-managers/kubernetes/integration-tests/README.md
@@ -17,6 +17,13 @@ To run tests with Java 11 instead of Java 8, use `--java-image-tag` to specify t
./dev/dev-run-integration-tests.sh --java-image-tag 11-jre-slim
+To run tests with a custom Docker image, use `--docker-file` to specify the Dockerfile.
+Note that if both `--docker-file` and `--java-image-tag` are used, `--docker-file` takes precedence,
+and the custom Dockerfile needs to include a Java installation by itself.
+Dockerfile.java17 is an example of a custom Dockerfile; you can specify it to run tests with Java 17.
+
+ ./dev/dev-run-integration-tests.sh --docker-file ../docker/src/main/dockerfiles/spark/Dockerfile.java17
+
To run tests with Hadoop 2.x instead of Hadoop 3.x, use `--hadoop-profile`.
./dev/dev-run-integration-tests.sh --hadoop-profile hadoop-2
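
The `--docker-file` option introduced in the hunk above composes with the wrapper's existing flags in the usual way. A minimal sketch, assuming the pre-existing `--deploy-mode` and `--spark-tgz` options; the deploy mode and tarball path here are illustrative, not part of this change:

    # Illustrative invocation: build the test images from the Java 17 Dockerfile
    # and run the integration tests against minikube.
    ./dev/dev-run-integration-tests.sh \
      --deploy-mode minikube \
      --spark-tgz /path/to/spark-dist.tgz \
      --docker-file ../docker/src/main/dockerfiles/spark/Dockerfile.java17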
@@ -237,6 +244,13 @@ to the wrapper scripts and using the wrapper scripts will simply set these appro
    <td><code>spark-r</code></td>
  </tr>
+  <tr>
+    <td><code>spark.kubernetes.test.dockerFile</code></td>
+    <td>
+      The path to the custom Dockerfile
+    </td>
+    <td><code>N/A</code></td>
+  </tr>
  <tr>
    <td><code>spark.kubernetes.test.namespace</code></td>
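
The table row above documents the Maven-facing property that `--docker-file` maps onto. For readers who drive Maven directly rather than through the wrapper, a rough sketch of an equivalent invocation, based on the script changes further down. The profile list is trimmed to the essentials, the absolute path is an assumption, and the other `-Dspark.kubernetes.test.*` properties your run needs (for example the Spark tarball) still apply:

    # Hypothetical direct Maven run from the repository root; mirrors what
    # dev-run-integration-tests.sh assembles when --docker-file is given.
    ./build/mvn install \
      -pl resource-managers/kubernetes/integration-tests \
      -Pkubernetes -Pkubernetes-integration-tests \
      -Dspark.kubernetes.test.dockerFile=$(pwd)/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17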
diff --git a/resource-managers/kubernetes/integration-tests/dev/dev-run-integration-tests.sh b/resource-managers/kubernetes/integration-tests/dev/dev-run-integration-tests.sh
index 8b14b7ecb1e2..be6cf16c73b6 100755
--- a/resource-managers/kubernetes/integration-tests/dev/dev-run-integration-tests.sh
+++ b/resource-managers/kubernetes/integration-tests/dev/dev-run-integration-tests.sh
@@ -19,6 +19,8 @@
set -exo errexit
TEST_ROOT_DIR=$(git rev-parse --show-toplevel)
+. $TEST_ROOT_DIR/build/util.sh
+
DEPLOY_MODE="minikube"
IMAGE_REPO="docker.io/kubespark"
SPARK_TGZ="N/A"
@@ -28,6 +30,7 @@ BASE_IMAGE_NAME=
JVM_IMAGE_NAME=
PYTHON_IMAGE_NAME=
R_IMAGE_NAME=
+DOCKER_FILE=
SPARK_MASTER=
NAMESPACE=
SERVICE_ACCOUNT=
@@ -70,6 +73,10 @@ while (( "$#" )); do
SPARK_TGZ="$2"
shift
;;
+ --docker-file)
+ DOCKER_FILE="$2"
+ shift
+ ;;
--spark-master)
SPARK_MASTER="$2"
shift
@@ -143,6 +150,11 @@ then
properties=( ${properties[@]} -Dspark.kubernetes.test.javaImageTag=$JAVA_IMAGE_TAG )
fi
+if [ -n "$DOCKER_FILE" ];
+then
+ properties=( ${properties[@]} -Dspark.kubernetes.test.dockerFile=$(realpath $DOCKER_FILE) )
+fi
+
if [ -n "$NAMESPACE" ];
then
properties=( ${properties[@]} -Dspark.kubernetes.test.namespace=$NAMESPACE )
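
One detail in the hunk above: `realpath` resolves a possibly relative `--docker-file` argument to an absolute path before it is passed to Maven, since the Maven build does not necessarily run from the caller's working directory. A small sketch of the effect; the checkout location in the output is illustrative:

    # Resolve the README's example path from the integration-tests directory.
    cd resource-managers/kubernetes/integration-tests
    realpath ../docker/src/main/dockerfiles/spark/Dockerfile.java17
    # prints something like:
    #   /home/user/spark/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17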
@@ -180,4 +192,14 @@ properties+=(
-Dlog4j.logger.org.apache.spark=DEBUG
)
-$TEST_ROOT_DIR/build/mvn install -f $TEST_ROOT_DIR/pom.xml -pl resource-managers/kubernetes/integration-tests $BUILD_DEPENDENCIES_MVN_FLAG -Pscala-$SCALA_VERSION -P$HADOOP_PROFILE -Pkubernetes -Pkubernetes-integration-tests ${properties[@]}
+(
+ cd $TEST_ROOT_DIR;
+ ./build/mvn install \
+ -pl resource-managers/kubernetes/integration-tests \
+ $BUILD_DEPENDENCIES_MVN_FLAG \
+ -Pscala-$SCALA_VERSION \
+ -P$HADOOP_PROFILE \
+ -Pkubernetes \
+ -Pkubernetes-integration-tests \
+ ${properties[@]}
+)
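
The final hunk also changes how Maven is launched: instead of pointing at the top-level POM with `-f $TEST_ROOT_DIR/pom.xml`, the build now runs in a subshell that `cd`s to the repository root, so the caller's working directory is left untouched. A tiny sketch of that subshell pattern; the paths are illustrative:

    # The parentheses confine the cd to the subshell.
    TEST_ROOT_DIR=$(git rev-parse --show-toplevel)
    cd /tmp
    pwd                              # /tmp
    ( cd "$TEST_ROOT_DIR" && pwd )   # prints the repository root
    pwd                              # still /tmp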