
Update sbt to 1.10.2 (#708) #462

Workflow file for this run

name: Tests
on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
    tags:
      - "**"
  schedule:
    - cron: "0 0 * * *"
  workflow_dispatch:
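# Cancel any in-progress run for the same pull request (or ref) when a newer run starts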
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
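  # check-docs: builds the Glow conda environment and checks the docs for broken links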
  check-docs:
    runs-on: ubuntu-latest
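    # bash -el = exit on first error and use a login shell, so the conda environment
    # activated by setup-miniconda is available in the run steps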
    defaults:
      run:
        shell: bash -el {0}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install Conda
        uses: conda-incubator/setup-miniconda@v3
        with:
          conda-solver: libmamba
          activate-environment: glow
      - name: Cache Conda env
        uses: actions/cache@v4
        with:
          path: /usr/share/miniconda/envs/glow
          key: conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
        env:
          # Increase this value to reset the cache when python/environment.yml has not changed
          CACHE_NUMBER: 0
        id: cache
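      # Rebuild the conda environment only when the cache key misses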
      - name: Update environment
        run: conda env update -n glow -f python/environment.yml
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Install Certs
        run: sudo apt-get install -y ca-certificates
      - name: Check docs links
        run: (cd docs && make linkcheck)
        continue-on-error: true
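  # spark-tests: runs the Scala, Python, and docs test suites for each Spark / Scala combination in the matrix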
  spark-tests:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash -el {0}
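    # One matrix job per Spark version; SPARK_VERSION and SCALA_VERSION are exposed to every step below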
    strategy:
      matrix:
        spark_version: [3.4.1, 3.5.1]
        scala_version: [2.12.19]
    env:
      SPARK_VERSION: ${{ matrix.spark_version }}
      SCALA_VERSION: ${{ matrix.scala_version }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Java
        uses: actions/setup-java@v4
        with:
          distribution: "adopt"
          java-version: "8"
          cache: "sbt"
          cache-dependency-path: |
            build.sbt
            plugins.sbt
      - name: Install Conda
        uses: conda-incubator/setup-miniconda@v3
        with:
          conda-solver: libmamba
          activate-environment: glow
      - name: Cache Conda env
        uses: actions/cache@v4
        with:
          path: /usr/share/miniconda/envs/glow
          key: conda-${{ hashFiles('python/environment.yml') }}-${{ env.CACHE_NUMBER }}
        env:
          # Increase this value to reset the cache when python/environment.yml has not changed
          CACHE_NUMBER: 0
        id: cache
      - name: Update environment
        run: conda env update -n glow -f python/environment.yml
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Install correct PySpark version
        run: pip install pyspark==${{ matrix.spark_version }}
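      # 'coverage' instruments the compiled classes so core/coverageReport can emit scoverage XML for Codecov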
      - name: Scala tests
        run: sbt compile coverage core/test core/coverageReport exit
      - name: Python tests
        run: sbt python/test exit
      - name: Docs tests
        run: sbt docs/test exit
      - name: Build artifacts
        run: bin/build --scala --python
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: binaries-${{ github.job }}-${{ matrix.spark_version }}-${{ matrix.scala_version }}
          path: |
            core/target/**/glow*assembly*.jar
            python/dist/*.whl
      - name: Upload results
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: test-results-${{ github.job }}-${{ matrix.spark_version }}-${{ matrix.scala_version }}
          path: |
            core/target/**/test-reports
            unit-tests.log
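      # Uploads coverage.xml (Python) and the scoverage XML (Scala) reports to Codecov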
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          files: ./coverage.xml, *scoverage.xml
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: unittests
          verbose: true
  # Dummy job so that required status checks don't need to change with the Spark / Scala matrix
  spark-tests-success:
    runs-on: ubuntu-latest
    needs: spark-tests
    steps:
      - run: echo "Spark tests passed!"
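  # spark-4-tests: opt-in job for Spark 4 snapshots; runs only when the PR title contains [SPARK4]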
  spark-4-tests:
    runs-on: ubuntu-latest
    if: contains(github.event.pull_request.title, '[SPARK4]')
    defaults:
      run:
        shell: bash -el {0}
    env:
      SPARK_VERSION: 4.0.0-SNAPSHOT
      SCALA_VERSION: 2.13.14
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Java
        uses: actions/setup-java@v4
        with:
          distribution: "adopt"
          java-version: "17"
          cache: "sbt"
          cache-dependency-path: |
            build.sbt
            plugins.sbt
      - name: Install Conda
        uses: conda-incubator/setup-miniconda@v3
        with:
          conda-solver: libmamba
          activate-environment: glow-spark4
      - name: Cache Conda env
        uses: actions/cache@v3
        with:
          path: /usr/share/miniconda/envs/glow-spark4
          key: conda-${{ hashFiles('python/spark-4-environment.yml') }}-${{ env.CACHE_NUMBER }}
        env:
          # Increase this value to reset the cache when python/spark-4-environment.yml has not changed
          CACHE_NUMBER: 0
        id: cache
      - name: Update environment
        run: conda env update -n glow-spark4 -f python/spark-4-environment.yml
        if: steps.cache.outputs.cache-hit != 'true'
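      # Spark 4 snapshot builds of PySpark are not published to PyPI, so the PySpark sources are taken
      # from a Spark checkout and put on the Python path via EXTRA_PYTHON_PATH in the test steps below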
      - name: Clone Spark (for PySpark source)
        run: (cd $HOME && git clone https://github.com/apache/spark.git)
      - name: Scala tests
        run: sbt core/test exit
      - name: Uninstall PySpark
        run: pip uninstall -y pyspark
      - name: Python tests
        run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt python/test exit
      - name: Docs tests
        run: EXTRA_PYTHON_PATH=$HOME/spark/python sbt docs/test exit
      - name: Build artifacts
        run: bin/build --scala --python
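      # Smoke test the assembly jar by running io.projectglow.TestAssemblyJar with only the jar on the classpath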
      - name: Test assembly jar
        run: java -cp core/target/**/glow*assembly*.jar io.projectglow.TestAssemblyJar
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: binaries-${{ github.job }}
          path: |
            core/target/**/glow*assembly*.jar
            python/dist/*.whl
      - name: Upload results
        uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: test-results-${{ github.job }}
          path: |
            core/target/**/test-reports
            unit-tests.log
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          files: ./coverage.xml, *scoverage.xml
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: unittests
          verbose: true