Handle DataNode failures in replicated writes #431

Workflow file for this run

name: python-test
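# Run for every push to master and for every pull request.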
on:
  push:
    branches:
      - 'master'
  pull_request:
    branches:
      - "*"
defaults:
  run:
    working-directory: ./python
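# The test job lints and tests the Python bindings on Python 3.9 and 3.13;
# fail-fast is disabled so a failure on one version does not cancel the other.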
jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - 3.9
          - 3.13
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
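      # Java 17 and the Hadoop 3.3.6 binaries are put on PATH, presumably so the
      # test suite can start a local HDFS cluster to run against.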
      - uses: actions/setup-java@v4
        with:
          distribution: "temurin"
          java-version: "17"
      - name: Download Hadoop
        run: |
          wget -q https://dlcdn.apache.org/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz
          tar -xf hadoop-3.3.6.tar.gz -C $GITHUB_WORKSPACE
          echo "$GITHUB_WORKSPACE/hadoop-3.3.6/bin" >> $GITHUB_PATH
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Create virtualenv
        run: |
          python3 -m venv .venv
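      # maturin builds the Rust extension in develop mode and installs it, along
      # with the devel extras, into the .venv created above.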
      - uses: PyO3/maturin-action@v1
        with:
          command: develop
          sccache: 'true'
          container: 'off'
          working-directory: ./python
          args: --extras devel
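      # Static checks: mypy type checking, ruff lint, and ruff format in check-only mode.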
      - name: Run lints
        run: |
          source .venv/bin/activate
          mypy
          ruff check .
          ruff format . --check --diff
      - name: Run tests
        run: |
          source .venv/bin/activate
          pytest
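  # The build job produces a release wheel with maturin; the artifact is only
  # uploaded for pushes to master.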
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build wheel
        uses: PyO3/maturin-action@v1
        with:
          args: --release --out dist --find-interpreter --manifest-path python/Cargo.toml
          sccache: 'true'
          manylinux: auto
      - name: Upload wheels
        if: github.ref == 'refs/heads/master'
        uses: actions/upload-artifact@v4
        with:
          name: wheels
          path: dist