1414 required : true
1515
1616jobs :
17- # This is on the top to give the most visibility in case of failures
18- hadoop-2 :
19- name : Hadoop 2 build
20- runs-on : ubuntu-20.04
21- steps :
22- - name : Checkout Spark repository
23- uses : actions/checkout@v2
24- - name : Cache Coursier local repository
25- uses : actions/cache@v2
26- with :
27- path : ~/.cache/coursier
28- key : hadoop-2-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
29- restore-keys : |
30- hadoop-2-coursier-
31- - name : Install Java 8
32- uses : actions/setup-java@v1
33- with :
34- java-version : 1.8
35- - name : Build with SBT
36- run : |
37- ./build/sbt -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Phadoop-2.7 compile test:compile
38-
3917 # Build: build Spark and run the tests for specified modules.
4018 build :
4119 name : " Build modules: ${{ matrix.modules }} ${{ matrix.comment }} (JDK ${{ matrix.java }}, ${{ matrix.hadoop }}, ${{ matrix.hive }})"
4523 fail-fast : false
4624 matrix :
4725 java :
48- - 1.8
26+ - 8
4927 hadoop :
5028 - hadoop3.2
5129 hive :
@@ -71,26 +49,26 @@ jobs:
7149 include :
7250 # Hive tests
7351 - modules : hive
74- java : 1.8
52+ java : 8
7553 hadoop : hadoop3.2
7654 hive : hive2.3
7755 included-tags : org.apache.spark.tags.SlowHiveTest
7856 comment : " - slow tests"
7957 - modules : hive
80- java : 1.8
58+ java : 8
8159 hadoop : hadoop3.2
8260 hive : hive2.3
8361 excluded-tags : org.apache.spark.tags.SlowHiveTest
8462 comment : " - other tests"
8563 # SQL tests
8664 - modules : sql
87- java : 1.8
65+ java : 8
8866 hadoop : hadoop3.2
8967 hive : hive2.3
9068 included-tags : org.apache.spark.tags.ExtendedSQLTest
9169 comment : " - slow tests"
9270 - modules : sql
93- java : 1.8
71+ java : 8
9472 hadoop : hadoop3.2
9573 hive : hive2.3
9674 excluded-tags : org.apache.spark.tags.ExtendedSQLTest
@@ -123,24 +101,18 @@ jobs:
123101 build/zinc-*
124102 build/scala-*
125103 build/*.jar
104+ ~/.sbt
126105 key : build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
127106 restore-keys : |
128107 build-
129- - name : Cache Maven local repository
130- uses : actions/cache@v2
131- with :
132- path : ~/.m2/repository
133- key : ${{ matrix.java }}-${{ matrix.hadoop }}-maven-${{ hashFiles('**/pom.xml') }}
134- restore-keys : |
135- ${{ matrix.java }}-${{ matrix.hadoop }}-maven-
136108 - name : Cache Coursier local repository
137109 uses : actions/cache@v2
138110 with :
139111 path : ~/.cache/coursier
140112 key : ${{ matrix.java }}-${{ matrix.hadoop }}-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
141113 restore-keys : |
142114 ${{ matrix.java }}-${{ matrix.hadoop }}-coursier-
143- - name : Install JDK ${{ matrix.java }}
115+ - name : Install Java ${{ matrix.java }}
144116 uses : actions/setup-java@v1
145117 with :
146118 java-version : ${{ matrix.java }}
@@ -163,9 +135,7 @@ jobs:
163135 run : |
164136 # Hive and SQL tests become flaky when running in parallel as it's too intensive.
165137 if [[ "$MODULES_TO_TEST" == "hive" ]] || [[ "$MODULES_TO_TEST" == "sql" ]]; then export SERIAL_SBT_TESTS=1; fi
166- mkdir -p ~/.m2
167138 ./dev/run-tests --parallelism 2 --modules "$MODULES_TO_TEST" --included-tags "$INCLUDED_TAGS" --excluded-tags "$EXCLUDED_TAGS"
168- rm -rf ~/.m2/repository/org/apache/spark
169139 - name : Upload test results to report
170140 if : always()
171141 uses : actions/upload-artifact@v2
@@ -218,16 +188,10 @@ jobs:
218188 build/zinc-*
219189 build/scala-*
220190 build/*.jar
191+ ~/.sbt
221192 key : build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
222193 restore-keys : |
223194 build-
224- - name : Cache Maven local repository
225- uses : actions/cache@v2
226- with :
227- path : ~/.m2/repository
228- key : pyspark-maven-${{ hashFiles('**/pom.xml') }}
229- restore-keys : |
230- pyspark-maven-
231195 - name : Cache Coursier local repository
232196 uses : actions/cache@v2
233197 with :
@@ -250,24 +214,22 @@ jobs:
250214 # Run the tests.
251215 - name : Run tests
252216 run : |
253- mkdir -p ~/.m2
254217 ./dev/run-tests --parallelism 2 --modules "$MODULES_TO_TEST"
255- rm -rf ~/.m2/repository/org/apache/spark
256218 - name : Upload test results to report
257219 if : always()
258220 uses : actions/upload-artifact@v2
259221 with :
260- name : test-results-${{ matrix.modules }}--1.8-hadoop3.2-hive2.3
222+ name : test-results-${{ matrix.modules }}--8-hadoop3.2-hive2.3
261223 path : " **/target/test-reports/*.xml"
262224 - name : Upload unit tests log files
263225 if : failure()
264226 uses : actions/upload-artifact@v2
265227 with :
266- name : unit-tests-log-${{ matrix.modules }}--1.8-hadoop3.2-hive2.3
228+ name : unit-tests-log-${{ matrix.modules }}--8-hadoop3.2-hive2.3
267229 path : " **/target/unit-tests.log"
268230
269231 sparkr :
270- name : Build modules - sparkr
232+ name : " Build modules: sparkr"
271233 runs-on : ubuntu-20.04
272234 container :
273235 image : dongjoon/apache-spark-github-action-image:20201025
@@ -294,16 +256,10 @@ jobs:
294256 build/zinc-*
295257 build/scala-*
296258 build/*.jar
259+ ~/.sbt
297260 key : build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
298261 restore-keys : |
299262 build-
300- - name : Cache Maven local repository
301- uses : actions/cache@v2
302- with :
303- path : ~/.m2/repository
304- key : sparkr-maven-${{ hashFiles('**/pom.xml') }}
305- restore-keys : |
306- sparkr-maven-
307263 - name : Cache Coursier local repository
308264 uses : actions/cache@v2
309265 with :
@@ -313,18 +269,16 @@ jobs:
313269 sparkr-coursier-
314270 - name : Run tests
315271 run : |
316- mkdir -p ~/.m2
317272 # The followings are also used by `r-lib/actions/setup-r` to avoid
318273 # R issues at docker environment
319274 export TZ=UTC
320275 export _R_CHECK_SYSTEM_CLOCK_=FALSE
321276 ./dev/run-tests --parallelism 2 --modules sparkr
322- rm -rf ~/.m2/repository/org/apache/spark
323277 - name : Upload test results to report
324278 if : always()
325279 uses : actions/upload-artifact@v2
326280 with :
327- name : test-results-sparkr--1.8-hadoop3.2-hive2.3
281+ name : test-results-sparkr--8-hadoop3.2-hive2.3
328282 path : " **/target/test-reports/*.xml"
329283
330284 # Static analysis, and documentation build
@@ -334,17 +288,37 @@ jobs:
334288 steps :
335289 - name : Checkout Spark repository
336290 uses : actions/checkout@v2
291+ # Cache local repositories. Note that GitHub Actions cache has a 2G limit.
292+ - name : Cache Scala, SBT, Maven and Zinc
293+ uses : actions/cache@v2
294+ with :
295+ path : |
296+ build/apache-maven-*
297+ build/zinc-*
298+ build/scala-*
299+ build/*.jar
300+ ~/.sbt
301+ key : build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
302+ restore-keys : |
303+ build-
304+ - name : Cache Coursier local repository
305+ uses : actions/cache@v2
306+ with :
307+ path : ~/.cache/coursier
308+ key : docs-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
309+ restore-keys : |
310+ docs-coursier-
337311 - name : Cache Maven local repository
338312 uses : actions/cache@v2
339313 with :
340314 path : ~/.m2/repository
341- key : docs-maven-repo-${{ hashFiles('**/pom.xml') }}
315+ key : docs-maven-${{ hashFiles('**/pom.xml') }}
342316 restore-keys : |
343317 docs-maven-
344- - name : Install JDK 1.8
318+ - name : Install Java 8
345319 uses : actions/setup-java@v1
346320 with :
347- java-version : 1.8
321+ java-version : 8
348322 - name : Install Python 3.6
349323 uses : actions/setup-python@v2
350324 with :
@@ -395,8 +369,8 @@ jobs:
395369 cd docs
396370 jekyll build
397371
398- java11 :
399- name : Java 11 build
372+ java-11 :
373+ name : Java 11 build with Maven
400374 runs-on : ubuntu-20.04
401375 steps :
402376 - name : Checkout Spark repository
@@ -416,12 +390,12 @@ jobs:
416390 run : |
417391 export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
418392 export MAVEN_CLI_OPTS="--no-transfer-progress"
419- mkdir -p ~/.m2
393+ # It uses Maven's 'install' intentionally, see https://github.com/apache/spark/pull/26414.
420394 ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=11 install
421395 rm -rf ~/.m2/repository/org/apache/spark
422396
423397 scala-213 :
424- name : Scala 2.13 build
398+ name : Scala 2.13 build with SBT
425399 runs-on : ubuntu-20.04
426400 steps :
427401 - name : Checkout Spark repository
@@ -433,11 +407,32 @@ jobs:
433407 key : scala-213-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
434408 restore-keys : |
435409 scala-213-coursier-
436- - name : Install Java 11
410+ - name : Install Java 8
437411 uses : actions/setup-java@v1
438412 with :
439- java-version : 11
413+ java-version : 8
440414 - name : Build with SBT
441415 run : |
442416 ./dev/change-scala-version.sh 2.13
443417 ./build/sbt -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Djava.version=11 -Pscala-2.13 compile test:compile
418+
419+ hadoop-2 :
420+ name : Hadoop 2 build with SBT
421+ runs-on : ubuntu-20.04
422+ steps :
423+ - name : Checkout Spark repository
424+ uses : actions/checkout@v2
425+ - name : Cache Coursier local repository
426+ uses : actions/cache@v2
427+ with :
428+ path : ~/.cache/coursier
429+ key : hadoop-2-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
430+ restore-keys : |
431+ hadoop-2-coursier-
432+ - name : Install Java 8
433+ uses : actions/setup-java@v1
434+ with :
435+ java-version : 8
436+ - name : Build with SBT
437+ run : |
438+ ./build/sbt -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Phadoop-2.7 compile test:compile
0 commit comments