Merge remote-tracking branch 'upstream/dev' into dev
rwrozelle committed Sep 20, 2024
2 parents b3a256a + 803de40 commit acd4266
Showing 2,584 changed files with 98,111 additions and 30,962 deletions.
4 changes: 4 additions & 0 deletions .core_files.yaml
@@ -14,6 +14,7 @@ core: &core
base_platforms: &base_platforms
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**
- homeassistant/components/binary_sensor/**
- homeassistant/components/button/**
- homeassistant/components/calendar/**
@@ -110,6 +111,7 @@ components: &components
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/trace/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**
@@ -125,9 +127,11 @@ tests: &tests
- tests/*.py
- tests/auth/**
- tests/backports/**
- tests/components/diagnostics/**
- tests/components/history/**
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/repairs/**
- tests/components/sensor/**
- tests/hassfest/**
- tests/helpers/**
7 changes: 4 additions & 3 deletions .github/workflows/builder.yml
@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: translations
path: translations.tar.gz
@@ -126,7 +126,7 @@ jobs:
env:
UV_PRERELEASE: allow
run: |
python3 -m pip install "$(grep '^uv' < requirements_test.txt)"
python3 -m pip install "$(grep '^uv' < requirements.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
@@ -316,6 +316,7 @@ jobs:
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
@@ -530,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@6149ea5740be74af77f260b9db67e633f6b0a9a1 # v1.4.2
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
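Several workflows in this commit (builder.yml above, ci.yaml and wheels.yml below) switch the uv bootstrap from requirements_test.txt to requirements.txt, consistent with the uv pin now living in requirements.txt. A minimal sketch of that bootstrap step, assuming requirements.txt contains a single pinned uv line; the step name and venv layout here are illustrative, not part of the commit:

    # Sketch: install uv at the exact version pinned in requirements.txt,
    # then let uv handle the remaining dependency installation.
    - name: Bootstrap uv and install dependencies
      run: |
        python3 -m venv venv
        . venv/bin/activate
        # Extract the "uv==X.Y.Z" line and install exactly that version
        pip install "$(grep '^uv' < requirements.txt)"
        uv pip install -r requirements.txt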
55 changes: 35 additions & 20 deletions .github/workflows/ci.yaml
@@ -252,7 +252,7 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
pip install "$(grep '^uv' < requirements.txt)"
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
@@ -429,17 +429,32 @@ jobs:
. venv/bin/activate
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files
lint-hadolint:
name: Check ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- info
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
strategy:
fail-fast: false
matrix:
file:
- Dockerfile
- Dockerfile.dev
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
- name: Check Dockerfile
uses: docker://hadolint/hadolint:v1.18.2
with:
args: hadolint Dockerfile
- name: Check Dockerfile.dev
uses: docker://hadolint/hadolint:v1.18.2
- name: Check ${{ matrix.file }}
uses: docker://hadolint/hadolint:v2.12.0
with:
args: hadolint Dockerfile.dev
args: hadolint ${{ matrix.file }}

base:
name: Prepare dependencies
@@ -461,7 +476,7 @@ jobs:
- name: Generate partial uv restore key
id: generate-uv-key
run: |
uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3)
uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3)
echo "version=${uv_version}" >> $GITHUB_OUTPUT
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
@@ -510,7 +525,7 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -U "pip>=21.3.1" setuptools wheel
uv pip install -r requirements.txt
python -m script.gen_requirements_all ci
@@ -623,7 +638,7 @@ jobs:
. venv/bin/activate
pip-licenses --format=json --output-file=licenses.json
- name: Upload licenses
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: licenses
path: licenses.json
@@ -833,7 +848,7 @@ jobs:
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: pytest_buckets
path: pytest_buckets.txt
@@ -934,14 +949,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@@ -1060,15 +1075,15 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@@ -1187,15 +1202,15 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@@ -1329,14 +1344,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
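The hadolint job above is reshaped from two hard-coded Dockerfile steps into a single step driven by a job matrix, so each Dockerfile is linted in its own job and, with fail-fast disabled, a failure in one file does not cancel the others. A standalone sketch of the same pattern; the workflow name and trigger are illustrative, not part of the commit:

    # Sketch: lint each Dockerfile in its own matrix job with hadolint.
    name: lint-dockerfiles
    on: [push]

    jobs:
      hadolint:
        name: Check ${{ matrix.file }}
        runs-on: ubuntu-24.04
        strategy:
          fail-fast: false   # keep linting the remaining files if one fails
          matrix:
            file:
              - Dockerfile
              - Dockerfile.dev
              - script/hassfest/docker/Dockerfile
        steps:
          - name: Check out code from GitHub
            uses: actions/checkout@v4.1.7
          - name: Check ${{ matrix.file }}
            uses: docker://hadolint/hadolint:v2.12.0
            with:
              args: hadolint ${{ matrix.file }}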
4 changes: 2 additions & 2 deletions .github/workflows/codeql.yml
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.1.7

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.26.6
uses: github/codeql-action/init@v3.26.8
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.26.6
uses: github/codeql-action/analyze@v3.26.8
with:
category: "/language:python"
46 changes: 28 additions & 18 deletions .github/workflows/wheels.yml
@@ -46,7 +46,7 @@ jobs:
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements_test.txt)"
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -r requirements.txt
- name: Get information
@@ -82,14 +82,15 @@ jobs:
) > .env_file
- name: Upload env_file
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: env_file
path: ./.env_file
include-hidden-files: true
overwrite: true

- name: Upload requirements_diff
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -101,7 +102,7 @@ jobs:
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.3.6
uses: actions/upload-artifact@v4.4.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -130,6 +131,12 @@ jobs:
with:
name: requirements_diff

- name: Adjust build env
run: |
# Don't build wheels for uv, as uv requires a newer version of Rust than is currently available on Alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.07.1
with:
@@ -139,7 +146,7 @@
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
skip-binary: aiohttp
skip-binary: aiohttp;multidict;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements.txt"
@@ -173,6 +180,18 @@ jobs:
with:
name: requirements_all_wheels

- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
# Don't build wheels for uv, as uv requires a newer version of Rust than is currently available on Alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Split requirements all
run: |
# We split requirements all into multiple files.
@@ -193,15 +212,6 @@
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.07.1
with:
@@ -211,7 +221,7 @@
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
@@ -226,7 +236,7 @@
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
@@ -240,7 +250,7 @@
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
@@ -254,7 +264,7 @@
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.2
rev: v0.6.5
hooks:
- id: ruff
args:
@@ -83,7 +83,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata