diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5e53566e6..1ce879ec2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ repos: rev: v4.6.0 hooks: - id: check-yaml - exclude: (tests/recipes|conda_smithy.recipe) + exclude: (tests/__snapshots__|tests/recipes|conda_smithy.recipe) - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/psf/black diff --git a/environment.yml b/environment.yml index a69b93108..dfff42b8f 100644 --- a/environment.yml +++ b/environment.yml @@ -9,10 +9,12 @@ dependencies: - python-build - setuptools>=45 - setuptools_scm>=8.1 + - syrupy - tomli>=1.0.0 - pre-commit - mock - pytest + - pytest-mock - pytest-cov # Runtime dependencies - conda >=4.2 diff --git a/news/improve_testsuite.rst b/news/improve_testsuite.rst new file mode 100644 index 000000000..b3818f5e6 --- /dev/null +++ b/news/improve_testsuite.rst @@ -0,0 +1,25 @@ +**Added:** + +* pytest-mock +* syrupy + +**Changed:** + +* Replace unittest with pytest +* Add snapshot tests + +**Deprecated:** + +* + +**Removed:** + +* unittest + +**Fixed:** + +* + +**Security:** + +* diff --git a/tests/__snapshots__/test_anaconda_token_rotation.ambr b/tests/__snapshots__/test_anaconda_token_rotation.ambr new file mode 100644 index 000000000..cbbd95994 --- /dev/null +++ b/tests/__snapshots__/test_anaconda_token_rotation.ambr @@ -0,0 +1,211 @@ +# serializer version: 1 +# name: test_rotate_anaconda_token_notoken[False-False-False-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-False-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-True-False-False] + 'You must have the anaconda token defined to do token rotation!' 
+# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-False-True-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-False-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[False-True-True-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-False-True-True] + 'You must have the anaconda token defined to do token rotation!' 
+# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-False-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-False-True-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-False-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-False-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-False-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-False-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-False-True-False] + 'You must have the anaconda token defined to do token rotation!' 
+# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-False-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-True-False-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-True-False-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-True-True-False] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_notoken[True-True-True-True-True-True] + 'You must have the anaconda token defined to do token rotation!' +# --- +# name: test_rotate_anaconda_token_provider_error[appveyor] + 'Failed to rotate token for foo/bar-feedstock on appveyor!' +# --- +# name: test_rotate_anaconda_token_provider_error[azure] + 'Failed to rotate token for foo/bar-feedstock on azure!' +# --- +# name: test_rotate_anaconda_token_provider_error[circle] + 'Failed to rotate token for foo/bar-feedstock on circle!' +# --- +# name: test_rotate_anaconda_token_provider_error[drone] + 'Failed to rotate token for foo/bar-feedstock on drone endpoint https://cloud.drone.io!' +# --- +# name: test_rotate_anaconda_token_provider_error[github_actions] + 'Failed to rotate token for foo/bar-feedstock on github actions!' +# --- +# name: test_rotate_anaconda_token_provider_error[travis] + 'Failed to rotate token for foo/bar-feedstock on travis!' +# --- diff --git a/tests/__snapshots__/test_ci_skeleton.ambr b/tests/__snapshots__/test_ci_skeleton.ambr new file mode 100644 index 000000000..9cf02d3a0 --- /dev/null +++ b/tests/__snapshots__/test_ci_skeleton.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_generate[CONDA_FORGE_YML] + ''' + clone_depth: 0 + recipe_dir: myrecipe + skip_render: + - README.md + - LICENSE.txt + - .gitattributes + - .gitignore + - build-locally.py + - LICENSE + - .github/CONTRIBUTING.md + - .github/ISSUE_TEMPLATE.md + - .github/PULL_REQUEST_TEMPLATE.md + - .github/workflows + ''' +# --- +# name: test_generate[META_YML] + ''' + {% set name = "my-package" %} + {% set version = environ.get('GIT_DESCRIBE_TAG', 'untagged')|string|replace('-','_') %} + {% set build_number = environ.get('GIT_DESCRIBE_NUMBER', '0') %} + + package: + name: {{ name|lower }} + version: {{ version }} + + source: + git_url: {{ environ.get('FEEDSTOCK_ROOT', '..') }} + + build: + # Uncomment the following line if the package is pure Python and the recipe + # is exactly the same for all platforms. It is okay if the dependencies are + # not built for all platforms/versions, although selectors are still not allowed. + # See https://conda-forge.org/docs/maintainer/knowledge_base.html#noarch-python + # for more details. + # noarch: python + + number: {{ build_number }} + string: {{ [build_number, ('h' + PKG_HASH), environ.get('GIT_DESCRIBE_HASH', '')]|join('_') }} + + # If the installation is complex, or different between Unix and Windows, + # use separate bld.bat and build.sh files instead of this key. By default, + # the package will be built for the Python versions supported by conda-forge + # and for all major OSs. Add the line "skip: True # [py<35]" (for example) + # to limit to Python 3.5 and newer, or "skip: True # [not win]" to limit + # to Windows. + script: "{{ PYTHON }} -m pip install . 
-vv" + + requirements: + build: + # If your project compiles code (such as a C extension) then add the required + # compilers as separate entries here. Compilers are named 'c', 'cxx' and 'fortran'. + - {{ compiler('c') }} + host: + - python + - pip + run: + - python + + test: + # Some packages might need a `test/commands` key to check CLI. + # List all the packages/modules that `run_test.py` imports. + imports: + - my_package + # Run your test commands here + commands: + - my-package --help + - pytest + # declare any test-only requirements here + requires: + - pytest + # copy over any needed test files here + source_files: + - tests/ + + # Uncomment and fill in my-package metadata + #about: + # home: https://github.com/conda-forge/conda-smithy + # license: BSD-3-Clause + # license_family: BSD + # license_file: LICENSE + + # Uncomment the following if this will be on a forge + # Remove these lines if this is only be used for CI + #extra: + # recipe-maintainers: + # - BobaFett + # - LisaSimpson + ''' +# --- +# name: test_generate[gitignore] + ''' + # conda smithy ci-skeleton start + *.pyc + build_artifacts + # conda smithy ci-skeleton end + + ''' +# --- diff --git a/tests/__snapshots__/test_cli.ambr b/tests/__snapshots__/test_cli.ambr new file mode 100644 index 000000000..a45b97e4d --- /dev/null +++ b/tests/__snapshots__/test_cli.ambr @@ -0,0 +1,193 @@ +# serializer version: 1 +# name: test_init_cuda_docker_images + dict({ + 'cdt_name': list([ + 'cos6', + ]), + 'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + 'None', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-comp7', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_cuda_docker_images[10.0] + dict({ + 'cdt_name': list([ + 'cos6', + ]), + 'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + '10.0', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-cuda:10.0', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_cuda_docker_images[10.1] + dict({ + 'cdt_name': list([ + 'cos6', + ]), + 'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + '10.1', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-cuda:10.1', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_cuda_docker_images[10.2] + dict({ + 'cdt_name': list([ + 'cos6', + ]), + 'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + '10.2', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-cuda:10.2', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_cuda_docker_images[11.0] + dict({ + 'cdt_name': list([ + 'cos7', + ]), + 'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + '11.0', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-cuda:11.0', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_cuda_docker_images[9.2] + dict({ + 'cdt_name': list([ + 'cos6', + ]), + 
'cuda_compiler': list([ + 'nvcc', + ]), + 'cuda_compiler_version': list([ + '9.2', + ]), + 'docker_image': list([ + 'condaforge/linux-anvil-cuda:9.2', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'cdt_name', + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_init_multiple_docker_images + dict({ + 'cdt_name': list([ + 'pickme_1', + ]), + 'docker_image': list([ + 'pickme_a', + ]), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + list([ + 'docker_image', + 'cdt_name', + ]), + ]), + }) +# --- +# name: test_init_with_custom_config[conda-build] + dict({ + 'bot': dict({ + 'automerge': True, + 'run_deps_from_wheel': True, + }), + }) +# --- +# name: test_init_with_custom_config[rattler-build] + dict({ + 'bot': dict({ + 'automerge': True, + 'run_deps_from_wheel': True, + }), + 'conda_build_tool': 'rattler-build', + }) +# --- diff --git a/tests/__snapshots__/test_configure_feedstock.ambr b/tests/__snapshots__/test_configure_feedstock.ambr new file mode 100644 index 000000000..545071ab2 --- /dev/null +++ b/tests/__snapshots__/test_configure_feedstock.ambr @@ -0,0 +1,535 @@ +# serializer version: 1 +# name: test_automerge_action_exists[conda-build] + dict({ + 'jobs': dict({ + 'automerge-action': dict({ + 'name': 'automerge', + 'runs-on': 'ubuntu-latest', + 'steps': list([ + dict({ + 'id': 'automerge-action', + 'name': 'automerge-action', + 'uses': 'conda-forge/automerge-action@main', + 'with': dict({ + 'github_token': '${{ secrets.GITHUB_TOKEN }}', + 'rerendering_github_token': '${{ secrets.RERENDERING_GITHUB_TOKEN }}', + }), + }), + ]), + }), + }), + True: dict({ + 'check_suite': dict({ + 'types': list([ + 'completed', + ]), + }), + 'status': dict({ + }), + }), + }) +# --- +# name: test_automerge_action_exists[rattler-build] + dict({ + 'jobs': dict({ + 'automerge-action': dict({ + 'name': 'automerge', + 'runs-on': 'ubuntu-latest', + 'steps': list([ + dict({ + 'id': 'automerge-action', + 'name': 'automerge-action', + 'uses': 'conda-forge/automerge-action@main', + 'with': dict({ + 'github_token': '${{ secrets.GITHUB_TOKEN }}', + 'rerendering_github_token': '${{ secrets.RERENDERING_GITHUB_TOKEN }}', + }), + }), + ]), + }), + }), + True: dict({ + 'check_suite': dict({ + 'types': list([ + 'completed', + ]), + }), + 'status': dict({ + }), + }), + }) +# --- +# name: test_choco_install[conda-build] + ''' + # This file was generated automatically from conda-smithy. To update this configuration, + # update the conda-forge.yml and/or the recipe/meta.yaml. 
+ # -*- mode: yaml -*- + + jobs: + - job: win + pool: + vmImage: windows-2022 + strategy: + matrix: + win_64_c_compilervs2008python2.7: + CONFIG: win_64_c_compilervs2008python2.7 + UPLOAD_PACKAGES: 'True' + win_64_c_compilervs2015python3.5: + CONFIG: win_64_c_compilervs2015python3.5 + UPLOAD_PACKAGES: 'True' + timeoutInMinutes: 360 + variables: + CONDA_BLD_PATH: D:\\bld\\ + UPLOAD_TEMP: D:\\tmp + + steps: + - script: | + choco install pkg0 -fdv -y --debug + displayName: "Install Chocolatey Package: pkg0" + + - script: | + choco install pkg1 --version=X.Y.Z -fdv -y --debug + displayName: "Install Chocolatey Package: pkg1 --version=X.Y.Z" + + + - task: PythonScript@0 + displayName: 'Download Miniforge' + inputs: + scriptSource: inline + script: | + import urllib.request + url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Windows-x86_64.exe' + path = r"$(Build.ArtifactStagingDirectory)/Miniforge.exe" + urllib.request.urlretrieve(url, path) + + - script: | + start /wait "" %BUILD_ARTIFACTSTAGINGDIRECTORY%\Miniforge.exe /InstallationType=JustMe /RegisterPython=0 /S /D=C:\Miniforge + displayName: Install Miniforge + + - powershell: Write-Host "##vso[task.prependpath]C:\Miniforge\Scripts" + displayName: Add conda to PATH + + - script: | + call ".scripts\run_win_build.bat" + displayName: Run Windows build + env: + PYTHONUNBUFFERED: 1 + CONFIG: $(CONFIG) + CI: azure + flow_run_id: azure_$(Build.BuildNumber).$(System.JobAttempt) + remote_url: $(Build.Repository.Uri) + sha: $(Build.SourceVersion) + UPLOAD_PACKAGES: $(UPLOAD_PACKAGES) + UPLOAD_TEMP: $(UPLOAD_TEMP) + UPLOAD_ON_BRANCH: foo-branch + BINSTAR_TOKEN: $(BINSTAR_TOKEN) + ''' +# --- +# name: test_choco_install[rattler-build] + ''' + # This file was generated automatically from conda-smithy. To update this configuration, + # update the conda-forge.yml and/or the recipe/meta.yaml. 
+ # -*- mode: yaml -*- + + jobs: + - job: win + pool: + vmImage: windows-2022 + strategy: + matrix: + win_64_python2.7: + CONFIG: win_64_python2.7 + UPLOAD_PACKAGES: 'True' + win_64_python3.5: + CONFIG: win_64_python3.5 + UPLOAD_PACKAGES: 'True' + timeoutInMinutes: 360 + variables: + CONDA_BLD_PATH: D:\\bld\\ + UPLOAD_TEMP: D:\\tmp + + steps: + - script: | + choco install pkg0 -fdv -y --debug + displayName: "Install Chocolatey Package: pkg0" + + - script: | + choco install pkg1 --version=X.Y.Z -fdv -y --debug + displayName: "Install Chocolatey Package: pkg1 --version=X.Y.Z" + + + - task: PythonScript@0 + displayName: 'Download Miniforge' + inputs: + scriptSource: inline + script: | + import urllib.request + url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Windows-x86_64.exe' + path = r"$(Build.ArtifactStagingDirectory)/Miniforge.exe" + urllib.request.urlretrieve(url, path) + + - script: | + start /wait "" %BUILD_ARTIFACTSTAGINGDIRECTORY%\Miniforge.exe /InstallationType=JustMe /RegisterPython=0 /S /D=C:\Miniforge + displayName: Install Miniforge + + - powershell: Write-Host "##vso[task.prependpath]C:\Miniforge\Scripts" + displayName: Add conda to PATH + + - script: | + call ".scripts\run_win_build.bat" + displayName: Run Windows build + env: + PYTHONUNBUFFERED: 1 + CONFIG: $(CONFIG) + CI: azure + flow_run_id: azure_$(Build.BuildNumber).$(System.JobAttempt) + remote_url: $(Build.Repository.Uri) + sha: $(Build.SourceVersion) + UPLOAD_PACKAGES: $(UPLOAD_PACKAGES) + UPLOAD_TEMP: $(UPLOAD_TEMP) + UPLOAD_ON_BRANCH: foo-branch + BINSTAR_TOKEN: $(BINSTAR_TOKEN) + ''' +# --- +# name: test_conda_build_tools[conda-build] + ''' + WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.build_with_mambabuild = True -> 'Build With Mambabuild' is deprecated. + build_with_mambabuild is deprecated, use `conda_build_tool` instead. + WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.build_with_mambabuild = False -> 'Build With Mambabuild' is deprecated. + build_with_mambabuild is deprecated, use `conda_build_tool` instead. 
+ WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.conda_build_tool = does-not-exist -> 'does-not-exist' is not valid under any of the given schemas + + ''' +# --- +# name: test_conda_build_tools[rattler-build] + ''' + WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.conda_build_tool = does-not-exist -> 'does-not-exist' is not valid under any of the given schemas + + ''' +# --- +# name: test_get_used_key_values_by_input_order[squished_input_variants0-squished_used_variants0-all_used_vars0][get_used_key_values_by_input_order] + dict({ + 'c_compiler': list([ + 'gcc', + ]), + 'c_compiler_version': list([ + '10', + '12', + ]), + 'cdt_name': list([ + 'cos7', + 'cos6', + ]), + 'channel_sources': list([ + 'conda-forge', + ]), + 'channel_targets': list([ + 'conda-forge main', + ]), + 'cuda_compiler': list([ + 'nvcc', + 'None', + ]), + 'cuda_compiler_version': list([ + '11.2', + 'None', + ]), + 'docker_image': list([ + 'quay.io/condaforge/linux-anvil-cuda:11.2', + 'quay.io/condaforge/linux-anvil-cos7-x86_64', + ]), + 'pin_run_as_build': dict({ + 'flann': dict({ + 'max_pin': 'x.x.x', + }), + 'graphviz': dict({ + 'max_pin': 'x', + }), + 'libsvm': dict({ + 'max_pin': 'x', + }), + 'netcdf-cxx4': dict({ + 'max_pin': 'x.x', + }), + 'occt': dict({ + 'max_pin': 'x.x', + }), + 'poppler': dict({ + 'max_pin': 'x.x', + }), + 'python': dict({ + 'max_pin': 'x.x', + 'min_pin': 'x.x', + }), + 'r-base': dict({ + 'max_pin': 'x.x', + 'min_pin': 'x.x', + }), + 'vlfeat': dict({ + 'max_pin': 'x.x.x', + }), + }), + 'target_platform': list([ + 'linux-64', + ]), + 'zip_keys': list([ + tuple( + 'arrow_cpp', + 'libarrow', + 'libarrow_all', + ), + tuple( + 'c_compiler_version', + 'cxx_compiler_version', + 'fortran_compiler_version', + 'cuda_compiler', + 'cuda_compiler_version', + 'cdt_name', + 'docker_image', + ), + tuple( + 'c_stdlib', + 'c_stdlib_version', + ), + tuple( + 'libgrpc', + 'libprotobuf', + ), + tuple( + 'python', + 'numpy', + 'python_impl', + ), + ]), + }) +# --- +# name: test_noarch_platforms_bad_yaml[conda-build] + ''' + WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.noarch_platforms = ['eniac', 'zx80'] -> ['eniac', 'zx80'] is not valid under any of the given schemas + + ''' +# --- +# name: test_noarch_platforms_bad_yaml[rattler-build] + ''' + WARNING conda_smithy.configure_feedstock:configure_feedstock.py:2236 conda-forge.yml: $.noarch_platforms = ['eniac', 'zx80'] -> ['eniac', 'zx80'] is not valid under any of the given schemas + + ''' +# --- +# name: test_stdlib_deployment_target[conda-build] + ''' + MACOSX_DEPLOYMENT_TARGET: + - '10.14' + MACOSX_SDK_VERSION: + - '10.14' + c_compiler: + - clang + c_stdlib: + - macosx_deployment_target + c_stdlib_version: + - '10.14' + target_platform: + - osx-64 + + ''' +# --- +# name: test_stdlib_on_azure[conda-build][linux content] + ''' + c_compiler: + - gcc + c_stdlib: + - sysroot + c_stdlib_version: + - '2.12' + docker_image: + - quay.io/condaforge/linux-anvil-comp7 + target_platform: + - linux-64 + + ''' +# --- +# name: test_stdlib_on_azure[conda-build][osx content] + ''' + MACOSX_DEPLOYMENT_TARGET: + - '10.9' + MACOSX_SDK_VERSION: + - '10.9' + c_compiler: + - clang + c_stdlib: + - macosx_deployment_target + c_stdlib_version: + - '10.9' + target_platform: + - osx-64 + + ''' +# --- +# name: test_stdlib_on_azure[conda-build][win content] + ''' + c_compiler: + - vs2017 + c_stdlib: + - vs + target_platform: + - win-64 + + ''' +# --- +# name: 
test_upload_on_branch_appveyor[conda-build][config] + dict({ + 'upload_on_branch': 'foo-branch', + }) +# --- +# name: test_upload_on_branch_appveyor[conda-build][deploy script] + list([ + 'set "GIT_BRANCH=%APPVEYOR_REPO_BRANCH%"', + 'set "FEEDSTOCK_NAME=%APPVEYOR_REPO_NAME:*/=%"', + 'set "UPLOAD_ON_BRANCH=foo-branch"', + dict({ + 'cmd': 'upload_package .\\ .\\recipe .ci_support\\%CONFIG%.yaml', + }), + ]) +# --- +# name: test_upload_on_branch_appveyor[rattler-build][config] + dict({ + 'upload_on_branch': 'foo-branch', + }) +# --- +# name: test_upload_on_branch_appveyor[rattler-build][deploy script] + list([ + 'set "GIT_BRANCH=%APPVEYOR_REPO_BRANCH%"', + 'set "FEEDSTOCK_NAME=%APPVEYOR_REPO_NAME:*/=%"', + 'set "UPLOAD_ON_BRANCH=foo-branch"', + dict({ + 'cmd': 'upload_package .\\ .\\recipe .ci_support\\%CONFIG%.yaml', + }), + ]) +# --- +# name: test_upload_on_branch_azure[conda-build][config] + dict({ + 'upload_on_branch': 'foo-branch', + }) +# --- +# name: test_upload_on_branch_azure[conda-build][content linux] + ''' + export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + export UPLOAD_ON_BRANCH="foo-branch" + if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then + export IS_PR_BUILD="True" + else + export IS_PR_BUILD="False" + fi + .scripts/run_docker_build.sh + + ''' +# --- +# name: test_upload_on_branch_azure[conda-build][content osx] + ''' + export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) + export OSX_FORCE_SDK_DOWNLOAD="1" + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + export UPLOAD_ON_BRANCH="foo-branch" + if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then + export IS_PR_BUILD="True" + else + export IS_PR_BUILD="False" + fi + ./.scripts/run_osx_build.sh + + ''' +# --- +# name: test_upload_on_branch_azure[rattler-build][config] + dict({ + 'upload_on_branch': 'foo-branch', + }) +# --- +# name: test_upload_on_branch_azure[rattler-build][content linux] + ''' + export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + export UPLOAD_ON_BRANCH="foo-branch" + if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then + export IS_PR_BUILD="True" + else + export IS_PR_BUILD="False" + fi + .scripts/run_docker_build.sh + + ''' +# --- +# name: test_upload_on_branch_azure[rattler-build][content osx] + ''' + export CI=azure + export flow_run_id=azure_$(Build.BuildNumber).$(System.JobAttempt) + export remote_url=$(Build.Repository.Uri) + export sha=$(Build.SourceVersion) + export OSX_FORCE_SDK_DOWNLOAD="1" + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + export UPLOAD_ON_BRANCH="foo-branch" + if [[ "${BUILD_REASON:-}" == "PullRequest" ]]; then + export IS_PR_BUILD="True" + else + export IS_PR_BUILD="False" + fi + ./.scripts/run_osx_build.sh + + ''' +# --- +# name: test_webservices_action_exists[conda-build] + dict({ + 'jobs': dict({ + 'webservices': dict({ + 'name': 'webservices', + 'runs-on': 'ubuntu-latest', + 'steps': list([ + dict({ + 'id': 
'webservices', + 'name': 'webservices', + 'uses': 'conda-forge/webservices-dispatch-action@main', + 'with': dict({ + 'github_token': '${{ secrets.GITHUB_TOKEN }}', + 'rerendering_github_token': '${{ secrets.RERENDERING_GITHUB_TOKEN }}', + }), + }), + ]), + }), + }), + True: 'repository_dispatch', + }) +# --- +# name: test_webservices_action_exists[rattler-build] + dict({ + 'jobs': dict({ + 'webservices': dict({ + 'name': 'webservices', + 'runs-on': 'ubuntu-latest', + 'steps': list([ + dict({ + 'id': 'webservices', + 'name': 'webservices', + 'uses': 'conda-forge/webservices-dispatch-action@main', + 'with': dict({ + 'github_token': '${{ secrets.GITHUB_TOKEN }}', + 'rerendering_github_token': '${{ secrets.RERENDERING_GITHUB_TOKEN }}', + }), + }), + ]), + }), + }), + True: 'repository_dispatch', + }) +# --- diff --git a/tests/__snapshots__/test_feedstock_tokens.ambr b/tests/__snapshots__/test_feedstock_tokens.ambr new file mode 100644 index 000000000..14e66e920 --- /dev/null +++ b/tests/__snapshots__/test_feedstock_tokens.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_feedstock_token_raises[$GH_TOKEN-bar-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GH_TOKEN-bar-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GH_TOKEN-bar-feedstock-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GH_TOKEN-bar-feedstock-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GITHUB_TOKEN-bar-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GITHUB_TOKEN-bar-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GITHUB_TOKEN-bar-feedstock-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[$GITHUB_TOKEN-bar-feedstock-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GH_TOKEN}-bar-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider failed! 
Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GH_TOKEN}-bar-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GH_TOKEN}-bar-feedstock-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GH_TOKEN}-bar-feedstock-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GITHUB_TOKEN}-bar-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GITHUB_TOKEN}-bar-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GITHUB_TOKEN}-bar-feedstock-None] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- +# name: test_feedstock_token_raises[${GITHUB_TOKEN}-bar-feedstock-azure] + FeedstockTokenError('Testing for the feedstock token for foo/bar-feedstock on provider azure failed! Try the command locally with DEBUG_FEEDSTOCK_TOKENS defined in the environment to investigate!') +# --- diff --git a/tests/__snapshots__/test_lint_recipe.ambr b/tests/__snapshots__/test_lint_recipe.ambr new file mode 100644 index 000000000..2dceffd7a --- /dev/null +++ b/tests/__snapshots__/test_lint_recipe.ambr @@ -0,0 +1,66 @@ +# serializer version: 1 +# name: test_osx_hint[run] + list([ + 'You\'re setting a constraint on the `__osx` virtual package directly; this should now be done by adding a build dependence on `{{ stdlib("c") }}`, and overriding `c_stdlib_version` in `recipe/conda_build_config.yaml` for the respective platform as necessary. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_osx_hint[run_constrained] + list([ + 'You\'re setting a constraint on the `__osx` virtual package directly; this should now be done by adding a build dependence on `{{ stdlib("c") }}`, and overriding `c_stdlib_version` in `recipe/conda_build_config.yaml` for the respective platform as necessary. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_osx_noarch_hint[run] + list([ + ]) +# --- +# name: test_osx_noarch_hint[run_constrained] + list([ + ]) +# --- +# name: test_stdlib_hint[c] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. 
For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[cxx] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[fortran] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[m2w64_c] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[m2w64_cxx] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[m2w64_fortran] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hint[rust] + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_stdlib_hints_multi_output + list([ + 'This recipe is using a compiler, which now requires adding a build dependence on `{{ stdlib("c") }}` as well. Note that this rule applies to each output of the recipe using a compiler. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + 'You\'re setting a requirement on sysroot_linux- directly; this should now be done by adding a build dependence on `{{ stdlib("c") }}`, and overriding `c_stdlib_version` in `recipe/conda_build_config.yaml` for the respective platform as necessary. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + 'You\'re setting a constraint on the `__osx` virtual package directly; this should now be done by adding a build dependence on `{{ stdlib("c") }}`, and overriding `c_stdlib_version` in `recipe/conda_build_config.yaml` for the respective platform as necessary. For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- +# name: test_sysroot_hint + list([ + 'You\'re setting a requirement on sysroot_linux- directly; this should now be done by adding a build dependence on `{{ stdlib("c") }}`, and overriding `c_stdlib_version` in `recipe/conda_build_config.yaml` for the respective platform as necessary. 
For further details, please see https://github.com/conda-forge/conda-forge.github.io/issues/2102.', + ]) +# --- diff --git a/tests/__snapshots__/test_variant_algebra.ambr b/tests/__snapshots__/test_variant_algebra.ambr new file mode 100644 index 000000000..5e58f6897 --- /dev/null +++ b/tests/__snapshots__/test_variant_algebra.ambr @@ -0,0 +1,275 @@ +# serializer version: 1 +# name: test_migrate_windows_compilers + dict({ + 'c_compiler': list([ + 'vs2008', + 'vs2017', + ]), + 'migrator_ts': list([ + -1.0, + ]), + 'vc': list([ + '9', + '14.1', + ]), + 'zip_keys': list([ + list([ + 'vc', + 'c_compiler', + ]), + ]), + }) +# --- +# name: test_multiple_key_add_migration[res2] + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'numpy': list([ + '1.16', + '1.100', + '1.200', + '1.16', + '1.16', + '1.18', + ]), + 'python': list([ + '3.6.* *_cpython', + '3.9.* *_cpython', + '3.10.* *_cpython', + '3.7.* *_cpython', + '3.8.* *_cpython', + '3.6.* *_73_pypy', + ]), + 'python_impl': list([ + 'cpython', + 'cpython', + 'cpython', + 'cpython', + 'cpython', + 'pypy', + ]), + 'zip_keys': list([ + list([ + 'cuda_compiler_version', + 'docker_image', + ]), + list([ + 'numpy', + 'python', + 'python_impl', + ]), + ]), + }) +# --- +# name: test_multiple_key_add_migration[res3] + dict({ + 'migrator_ts': -1.0, + 'python': list([ + '3.6.* *_cpython', + '3.9.* *_cpython', + '3.10.* *_cpython', + '3.7.* *_cpython', + '3.8.* *_cpython', + ]), + 'python_impl': list([ + 'cpython', + ]), + 'zip_keys': list([ + list([ + 'python', + ]), + list([ + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_new_pinned_package + dict({ + 'gprc-cpp': list([ + '1.23', + ]), + 'jpeg': list([ + '3.0', + ]), + 'migrator_ts': list([ + -1.0, + ]), + 'pin_run_as_build': dict({ + 'gprc-cpp': dict({ + 'max_pin': 'x.x', + }), + 'jpeg': dict({ + 'max_pin': 'x', + }), + }), + }) +# --- +# name: test_no_ordering + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'xyz': list([ + '2', + ]), + }) +# --- +# name: test_ordering + dict({ + 'c_compiler': list([ + 'gcc', + ]), + 'migrator_ts': list([ + -1.0, + ]), + }) +# --- +# name: test_ordering_downgrade + dict({ + 'jpeg': list([ + '2.0', + ]), + 'migrator_ts': list([ + -1.0, + ]), + }) +# --- +# name: test_ordering_space + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'python': list([ + '2.7 *_cpython', + ]), + }) +# --- +# name: test_pin_run_as_build + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'pin_run_as_build': dict({ + 'boost-cpp': dict({ + 'max_pin': 'x.x', + }), + 'python': dict({ + 'max_pin': 'x.x', + }), + 'rust': dict({ + 'max_pin': 'x', + }), + }), + }) +# --- +# name: test_py39_migration[res2] + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'numpy': list([ + '1.16', + '1.100', + '1.16', + '1.16', + '1.18', + ]), + 'python': list([ + '3.6.* *_cpython', + '3.9.* *_cpython', + '3.7.* *_cpython', + '3.8.* *_cpython', + '3.6.* *_73_pypy', + ]), + 'python_impl': list([ + 'cpython', + 'cpython', + 'cpython', + 'cpython', + 'pypy', + ]), + 'zip_keys': list([ + list([ + 'cuda_compiler_version', + 'docker_image', + ]), + list([ + 'numpy', + 'python', + 'python_impl', + ]), + ]), + }) +# --- +# name: test_py39_migration[res3] + dict({ + 'migrator_ts': -1.0, + 'python': list([ + '3.6.* *_cpython', + '3.9.* *_cpython', + '3.7.* *_cpython', + '3.8.* *_cpython', + ]), + 'python_impl': list([ + 'cpython', + ]), + 'zip_keys': list([ + list([ + 'python', + ]), + list([ + 'cuda_compiler_version', + 'docker_image', + ]), + ]), + }) +# --- +# name: test_variant_key_remove + dict({ 
+ 'migrator_ts': -1.0, + 'numpy': list([ + '1.18', + '1.16', + ]), + 'python': list([ + '3.6.* *_73_pypy', + '3.8.* *_cpython', + ]), + 'python_impl': list([ + 'pypy', + 'cpython', + ]), + 'zip_keys': list([ + list([ + 'python', + 'numpy', + 'python_impl', + ]), + ]), + }) +# --- +# name: test_zip_keys + dict({ + 'migrator_ts': list([ + -1.0, + ]), + 'zip_keys': list([ + list([ + 'c_compiler', + 'root', + ]), + list([ + 'pyqt', + 'qt', + ]), + list([ + 'python', + 'vc', + 'vc_runtime', + ]), + ]), + }) +# --- diff --git a/tests/test_anaconda_token_rotation.py b/tests/test_anaconda_token_rotation.py index b860f6ecc..ef8f4513a 100644 --- a/tests/test_anaconda_token_rotation.py +++ b/tests/test_anaconda_token_rotation.py @@ -1,5 +1,3 @@ -from unittest import mock - import pytest from conda_smithy.anaconda_token_rotation import rotate_anaconda_token @@ -13,25 +11,8 @@ @pytest.mark.parametrize("azure", [True, False]) @pytest.mark.parametrize("travis", [True, False]) @pytest.mark.parametrize("github_actions", [True, False]) -@mock.patch("conda_smithy.github.gh_token") -@mock.patch("conda_smithy.anaconda_token_rotation._get_anaconda_token") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_drone") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_circle") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_travis") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_azure") -@mock.patch( - "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" -) def test_rotate_anaconda_token( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, - get_ac_token, - get_gh_token, + mocker, appveyor, drone, circle, @@ -39,6 +20,29 @@ def test_rotate_anaconda_token( travis, github_actions, ): + github_actions_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" + ) + azure_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_azure" + ) + travis_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_travis" + ) + circle_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_circle" + ) + drone_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_drone" + ) + appveyor_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor" + ) + get_ac_token = mocker.patch( + "conda_smithy.anaconda_token_rotation._get_anaconda_token" + ) + get_gh_token = mocker.patch("conda_smithy.github.gh_token") + user = "foo" project = "bar" @@ -111,7 +115,7 @@ def test_rotate_anaconda_token( project, anaconda_token, "MY_FANCY_TOKEN", - mock.ANY, + mocker.ANY, ) else: github_actions_mock.assert_not_called() @@ -123,29 +127,35 @@ def test_rotate_anaconda_token( @pytest.mark.parametrize("azure", [True, False]) @pytest.mark.parametrize("travis", [True, False]) @pytest.mark.parametrize("github_actions", [True, False]) -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_drone") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_circle") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_travis") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_azure") -@mock.patch( - "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" -) def 
test_rotate_anaconda_token_notoken( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, + mocker, appveyor, drone, circle, azure, travis, github_actions, - monkeypatch, + snapshot, ): + github_actions_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" + ) + azure_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_azure" + ) + travis_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_travis" + ) + circle_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_circle" + ) + drone_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_drone" + ) + appveyor_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor" + ) + user = "foo" project = "bar" @@ -162,8 +172,7 @@ def test_rotate_anaconda_token_notoken( github_actions=github_actions, drone_endpoints=[drone_default_endpoint], ) - - assert "anaconda token" in str(e.value) + assert str(e.value) == snapshot drone_mock.assert_not_called() circle_mock.assert_not_called() @@ -177,27 +186,30 @@ def test_rotate_anaconda_token_notoken( "provider", ["drone", "circle", "travis", "azure", "appveyor", "github_actions"], ) -@mock.patch("conda_smithy.github.gh_token") -@mock.patch("conda_smithy.anaconda_token_rotation._get_anaconda_token") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_drone") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_circle") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_travis") -@mock.patch("conda_smithy.anaconda_token_rotation.rotate_token_in_azure") -@mock.patch( - "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" -) -def test_rotate_anaconda_token_provider_error( - github_actions_mock, - azure_mock, - travis_mock, - circle_mock, - drone_mock, - appveyor_mock, - get_ac_token, - get_gh_token, - provider, -): +def test_rotate_anaconda_token_provider_error(mocker, provider, snapshot): + github_actions_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_github_actions" + ) + azure_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_azure" + ) + travis_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_travis" + ) + circle_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_circle" + ) + drone_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_drone" + ) + appveyor_mock = mocker.patch( + "conda_smithy.anaconda_token_rotation.rotate_token_in_appveyor" + ) + get_ac_token = mocker.patch( + "conda_smithy.anaconda_token_rotation._get_anaconda_token" + ) + get_gh_token = mocker.patch("conda_smithy.github.gh_token") + user = "foo" project = "bar" @@ -225,5 +237,4 @@ def test_rotate_anaconda_token_provider_error( rotate_anaconda_token( user, project, None, drone_endpoints=[drone_default_endpoint] ) - - assert "on %s" % provider.replace("_", " ") in str(e.value) + assert str(e.value) == snapshot diff --git a/tests/test_ci_skeleton.py b/tests/test_ci_skeleton.py index de14bc463..1494cf0bb 100644 --- a/tests/test_ci_skeleton.py +++ b/tests/test_ci_skeleton.py @@ -3,99 +3,7 @@ from conda_smithy.ci_skeleton import generate -CONDA_FORGE_YML = """clone_depth: 0 -recipe_dir: myrecipe -skip_render: - - README.md - - LICENSE.txt - - .gitattributes - - .gitignore 
- - build-locally.py - - LICENSE - - .github/CONTRIBUTING.md - - .github/ISSUE_TEMPLATE.md - - .github/PULL_REQUEST_TEMPLATE.md - - .github/workflows""" - -META_YAML = """{% set name = "my-package" %} -{% set version = environ.get('GIT_DESCRIBE_TAG', 'untagged')|string|replace('-','_') %} -{% set build_number = environ.get('GIT_DESCRIBE_NUMBER', '0') %} - -package: - name: {{ name|lower }} - version: {{ version }} - -source: - git_url: {{ environ.get('FEEDSTOCK_ROOT', '..') }} - -build: - # Uncomment the following line if the package is pure Python and the recipe - # is exactly the same for all platforms. It is okay if the dependencies are - # not built for all platforms/versions, although selectors are still not allowed. - # See https://conda-forge.org/docs/maintainer/knowledge_base.html#noarch-python - # for more details. - # noarch: python - - number: {{ build_number }} - string: {{ [build_number, ('h' + PKG_HASH), environ.get('GIT_DESCRIBE_HASH', '')]|join('_') }} - - # If the installation is complex, or different between Unix and Windows, - # use separate bld.bat and build.sh files instead of this key. By default, - # the package will be built for the Python versions supported by conda-forge - # and for all major OSs. Add the line "skip: True # [py<35]" (for example) - # to limit to Python 3.5 and newer, or "skip: True # [not win]" to limit - # to Windows. - script: "{{ PYTHON }} -m pip install . -vv" - -requirements: - build: - # If your project compiles code (such as a C extension) then add the required - # compilers as separate entries here. Compilers are named 'c', 'cxx' and 'fortran'. - - {{ compiler('c') }} - host: - - python - - pip - run: - - python - -test: - # Some packages might need a `test/commands` key to check CLI. - # List all the packages/modules that `run_test.py` imports. 
-  imports:
-    - my_package
-  # Run your test commands here
-  commands:
-    - my-package --help
-    - pytest
-  # declare any test-only requirements here
-  requires:
-    - pytest
-  # copy over any needed test files here
-  source_files:
-    - tests/
-
-# Uncomment and fill in my-package metadata
-#about:
-#  home: https://github.com/conda-forge/conda-smithy
-#  license: BSD-3-Clause
-#  license_family: BSD
-#  license_file: LICENSE
-
-# Uncomment the following if this will be on a forge
-# Remove these lines if this is only be used for CI
-#extra:
-#  recipe-maintainers:
-#    - BobaFett
-#    - LisaSimpson"""
-
-GITIGNORE = """# conda smithy ci-skeleton start
-*.pyc
-build_artifacts
-# conda smithy ci-skeleton end
-"""
-
-
-def test_generate(tmpdir):
+def test_generate(tmpdir, snapshot):
     generate(
         package_name="my-package",
         feedstock_directory=str(tmpdir),
@@ -103,10 +11,10 @@ def test_generate(tmpdir):
     )
     with open(tmpdir / "conda-forge.yml") as f:
         conda_forge_yml = f.read()
-    assert conda_forge_yml == CONDA_FORGE_YML
+    assert conda_forge_yml == snapshot(name="CONDA_FORGE_YML")
     with open(tmpdir / "myrecipe" / "meta.yaml") as f:
         meta_yaml = f.read()
-    assert meta_yaml == META_YAML
+    assert meta_yaml == snapshot(name="META_YML")
     with open(tmpdir / ".gitignore") as f:
         gitignore = f.read()
-    assert gitignore == GITIGNORE
+    assert gitignore == snapshot(name="gitignore")
diff --git a/tests/test_cli.py b/tests/test_cli.py
index a4d09d3e0..2e7719965 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -50,7 +50,7 @@ def test_init(py_recipe):
     assert os.path.isdir(destination)


-def test_init_with_custom_config(py_recipe):
+def test_init_with_custom_config(py_recipe, snapshot):
     """This is the command that takes the initial staged-recipe folder
     and turns it into a feedstock"""
     # actual parser doesn't matter. It's used for initialization only
@@ -82,9 +82,7 @@ def test_init_with_custom_config(py_recipe):
     data = yaml.safe_load(
         open(os.path.join(destination, "conda-forge.yml"), "r").read()
     )
-    assert data.get("bot") != None
-    assert data["bot"]["automerge"] == True
-    assert data["bot"]["run_deps_from_wheel"] == True
+    assert data == snapshot


 def test_init_multiple_output_matrix(testing_workdir):
@@ -200,7 +198,7 @@ def test_render_readme_with_multiple_outputs(testing_workdir, dirname):
             assert False


-def test_init_cuda_docker_images(testing_workdir):
+def test_init_cuda_docker_images(testing_workdir, snapshot):
     parser = argparse.ArgumentParser()
     subparser = parser.add_subparsers()
     init_obj = cli.Init(subparser)
@@ -241,20 +239,11 @@ def test_init_cuda_docker_images(testing_workdir):
         assert os.path.isfile(fn)
         with open(fn) as fh:
             config = yaml.safe_load(fh)
-        assert config["cuda_compiler"] == ["nvcc"]
-        assert config["cuda_compiler_version"] == [f"{v}"]
-        if v is None:
-            docker_image = "condaforge/linux-anvil-comp7"
-        else:
-            docker_image = f"condaforge/linux-anvil-cuda:{v}"
-        assert config["docker_image"] == [docker_image]
-        if v == "11.0":
-            assert config["cdt_name"] == ["cos7"]
-        else:
-            assert config["cdt_name"] == ["cos6"]
+        assert config == snapshot(name=v)


-def test_init_multiple_docker_images(testing_workdir):
+
+def test_init_multiple_docker_images(testing_workdir, snapshot):
     parser = argparse.ArgumentParser()
     subparser = parser.add_subparsers()
     init_obj = cli.Init(subparser)
@@ -292,8 +281,7 @@ def test_init_multiple_docker_images(testing_workdir):
     assert os.path.isfile(fn)
     with open(fn) as fh:
         config = yaml.safe_load(fh)
-    assert config["docker_image"] == ["pickme_a"]
-    assert config["cdt_name"] == ["pickme_1"]
+    assert config == snapshot


 def test_regenerate(py_recipe, testing_workdir):
diff --git a/tests/test_configure_feedstock.py b/tests/test_configure_feedstock.py
index 80bdf72b0..f0aa619fa 100644
--- a/tests/test_configure_feedstock.py
+++ b/tests/test_configure_feedstock.py
@@ -7,6 +7,7 @@
 from pathlib import Path

 import pytest
+from syrupy.filters import paths
 import yaml

 from conftest import ConfigYAML
@@ -214,7 +215,7 @@ def test_py_matrix_on_azure(py_recipe, jinja_env):
     assert len(os.listdir(matrix_dir)) == 6


-def test_stdlib_on_azure(stdlib_recipe, jinja_env, request):
+def test_stdlib_on_azure(stdlib_recipe, jinja_env, request, snapshot):
     conda_build_param = request.node.callspec.params["config_yaml"]
     if conda_build_param == "rattler-build":
         # stdlib is not yet implemented in rattler-build
@@ -236,29 +237,22 @@ def test_stdlib_on_azure(stdlib_recipe, jinja_env, request):
     with open(os.path.join(matrix_dir, "linux_64_.yaml")) as f:
         linux_lines = f.readlines()
         linux_content = "".join(linux_lines)
-    # multiline pattern to ensure we don't match other stuff accidentally
-    assert re.match(r"(?s).*c_stdlib:\s*- sysroot", linux_content)
-    assert re.match(r"(?s).*c_stdlib_version:\s*- ['\"]?2\.\d+", linux_content)
+    assert linux_content == snapshot(name="linux content")
+
     with open(os.path.join(matrix_dir, "osx_64_.yaml")) as f:
         osx_lines = f.readlines()
         osx_content = "".join(osx_lines)
-    assert re.match(
-        r"(?s).*c_stdlib:\s*- macosx_deployment_target", osx_content
-    )
-    assert re.match(r"(?s).*c_stdlib_version:\s*- ['\"]?10\.9", osx_content)
-    # ensure MACOSX_DEPLOYMENT_TARGET _also_ gets set to the same value
-    assert re.match(
-        r"(?s).*MACOSX_DEPLOYMENT_TARGET:\s*- ['\"]?10\.9", osx_content
-    )
+    assert osx_content == snapshot(name="osx content")
+
     with open(os.path.join(matrix_dir, "win_64_.yaml")) as f:
         win_lines = f.readlines()
         win_content = "".join(win_lines)
-    assert re.match(r"(?s).*c_stdlib:\s*- vs", win_content)
     # no stdlib-version expected on windows
+    assert win_content == snapshot(name="win content")


 def test_stdlib_deployment_target(
-    stdlib_deployment_target_recipe, jinja_env, caplog, request
+    stdlib_deployment_target_recipe, jinja_env, caplog, request, snapshot
 ):
     conda_build_param = request.node.callspec.params["config_yaml"]
     if conda_build_param == "rattler-build":
@@ -283,23 +277,20 @@ def test_stdlib_deployment_target(
     content = "".join(lines)
     # ensure both MACOSX_DEPLOYMENT_TARGET and c_stdlib_version match
     # the maximum of either, c.f. stdlib_deployment_target_recipe fixture
-    assert re.match(r"(?s).*c_stdlib_version:\s*- ['\"]?10\.14", content)
-    assert re.match(
-        r"(?s).*MACOSX_DEPLOYMENT_TARGET:\s*- ['\"]?10\.14", content
-    )
-    # MACOSX_SDK_VERSION gets updated as well if it's below the other two
-    assert re.match(r"(?s).*MACOSX_SDK_VERSION:\s*- ['\"]?10\.14", content)
+    assert content == snapshot


-def test_upload_on_branch_azure(upload_on_branch_recipe, jinja_env):
+def test_upload_on_branch_azure(upload_on_branch_recipe, jinja_env, snapshot):
     configure_feedstock.render_azure(
         jinja_env=jinja_env,
         forge_config=upload_on_branch_recipe.config,
         forge_dir=upload_on_branch_recipe.recipe,
     )
     # Check that the parameter is in the configuration.
-    assert "upload_on_branch" in upload_on_branch_recipe.config
-    assert upload_on_branch_recipe.config["upload_on_branch"] == "foo-branch"
+    assert upload_on_branch_recipe.config == snapshot(
+        include=paths("upload_on_branch"), name="config"
+    )
+
     # Check that the parameter is in the generated file.
     with open(
         os.path.join(
@@ -309,13 +300,8 @@ def test_upload_on_branch_azure(upload_on_branch_recipe, jinja_env):
         )
     ) as fp:
         content_osx = yaml.safe_load(fp)
-    assert (
-        'UPLOAD_ON_BRANCH="foo-branch"'
-        in content_osx["jobs"][0]["steps"][0]["script"]
-    )
-    assert (
-        "BUILD_SOURCEBRANCHNAME"
-        in content_osx["jobs"][0]["steps"][0]["script"]
+    assert content_osx["jobs"][0]["steps"][0]["script"] == snapshot(
+        name="content osx"
     )

     with open(
         os.path.join(
@@ -350,17 +336,14 @@ def test_upload_on_branch_azure(upload_on_branch_recipe, jinja_env):
         )
     ) as fp:
         content_lin = yaml.safe_load(fp)
-    assert (
-        'UPLOAD_ON_BRANCH="foo-branch"'
-        in content_lin["jobs"][0]["steps"][1]["script"]
-    )
-    assert (
-        "BUILD_SOURCEBRANCHNAME"
-        in content_lin["jobs"][0]["steps"][1]["script"]
+    assert content_lin["jobs"][0]["steps"][1]["script"] == snapshot(
+        name="content linux"
     )


-def test_upload_on_branch_appveyor(upload_on_branch_recipe, jinja_env):
+def test_upload_on_branch_appveyor(
+    upload_on_branch_recipe, jinja_env, snapshot
+):
     upload_on_branch_recipe.config["provider"]["win"] = "appveyor"
     configure_feedstock.render_appveyor(
         jinja_env=jinja_env,
@@ -368,16 +351,16 @@ def test_upload_on_branch_appveyor(
         forge_dir=upload_on_branch_recipe.recipe,
     )
     # Check that the parameter is in the configuration.
-    assert "upload_on_branch" in upload_on_branch_recipe.config
-    assert upload_on_branch_recipe.config["upload_on_branch"] == "foo-branch"
+    assert upload_on_branch_recipe.config == snapshot(
+        include=paths("upload_on_branch"), name="config"
+    )

     # Check that the parameter is in the generated file.
     with open(
         os.path.join(upload_on_branch_recipe.recipe, ".appveyor.yml")
     ) as fp:
         content = yaml.safe_load(fp)
-    assert "%APPVEYOR_REPO_BRANCH%" in content["deploy_script"][0]
-    assert "UPLOAD_ON_BRANCH=foo-branch" in content["deploy_script"][-2]
+    assert content["deploy_script"] == snapshot(name="deploy script")


 def test_circle_with_yum_reqs(py_recipe, jinja_env):
@@ -766,7 +749,7 @@ def test_files_skip_render(render_skipped_recipe, jinja_env):
         assert not os.path.exists(fpath)


-def test_choco_install(choco_recipe, jinja_env):
+def test_choco_install(choco_recipe, jinja_env, snapshot):
     configure_feedstock.render_azure(
         jinja_env=jinja_env,
         forge_config=choco_recipe.config,
@@ -780,19 +763,10 @@ def test_choco_install(choco_recipe, jinja_env):
     assert os.path.isfile(azure_file)
     with open(azure_file) as f:
         contents = f.read()
-    exp = """
-    - script: |
-        choco install pkg0 -fdv -y --debug
-      displayName: "Install Chocolatey Package: pkg0"
+    assert contents == snapshot

-    - script: |
-        choco install pkg1 --version=X.Y.Z -fdv -y --debug
-      displayName: "Install Chocolatey Package: pkg1 --version=X.Y.Z"
-""".strip()
-    assert exp in contents
-
-def test_webservices_action_exists(py_recipe, jinja_env):
+
+def test_webservices_action_exists(py_recipe, jinja_env, snapshot):
     configure_feedstock.render_github_actions_services(
         jinja_env=jinja_env,
         forge_config=py_recipe.config,
@@ -805,11 +779,10 @@ def test_webservices_action_exists(py_recipe, jinja_env):
         os.path.join(py_recipe.recipe, ".github/workflows/webservices.yml")
     ) as f:
         action_config = yaml.safe_load(f)
-    assert "jobs" in action_config
-    assert "webservices" in action_config["jobs"]
+    assert action_config == snapshot


-def test_automerge_action_exists(py_recipe, jinja_env):
+def test_automerge_action_exists(py_recipe, jinja_env, snapshot):
     configure_feedstock.render_github_actions_services(
         jinja_env=jinja_env,
         forge_config=py_recipe.config,
@@ -822,8 +795,7 @@ def test_automerge_action_exists(py_recipe, jinja_env):
         os.path.join(py_recipe.recipe, ".github/workflows/automerge.yml")
     ) as f:
         action_config = yaml.safe_load(f)
-    assert "jobs" in action_config
-    assert "automerge-action" in action_config["jobs"]
+    assert action_config == snapshot


 def test_conda_forge_yaml_empty(config_yaml: ConfigYAML):
@@ -845,7 +817,7 @@ def test_conda_forge_yaml_empty(config_yaml: ConfigYAML):
     assert load_forge_config()["recipe_dir"] == "recipe"


-def test_noarch_platforms_bad_yaml(config_yaml: ConfigYAML, caplog):
+def test_noarch_platforms_bad_yaml(config_yaml: ConfigYAML, caplog, snapshot):
     load_forge_config = lambda: configure_feedstock._load_forge_config(  # noqa
         config_yaml.workdir,
         exclusive_config_file=os.path.join(
@@ -860,8 +832,7 @@ def test_noarch_platforms_bad_yaml(config_yaml: ConfigYAML, caplog):
     with caplog.at_level(logging.WARNING):
         load_forge_config()
-
-    assert "eniac" in caplog.text
+    assert caplog.text == snapshot


 def test_forge_yml_alt_path(config_yaml: ConfigYAML):
@@ -952,7 +923,7 @@ def test_cuda_enabled_render(cuda_enabled_recipe, jinja_env):
         del os.environ["CF_CUDA_ENABLED"]


-def test_conda_build_tools(config_yaml: ConfigYAML, caplog):
+def test_conda_build_tools(config_yaml: ConfigYAML, caplog, snapshot):
     load_forge_config = lambda: configure_feedstock._load_forge_config(  # noqa
         config_yaml.workdir,
         exclusive_config_file=os.path.join(
@@ -997,7 +968,7 @@ def test_conda_build_tools(config_yaml: ConfigYAML, caplog):
     with caplog.at_level(logging.WARNING):
         assert load_forge_config()

-    assert "does-not-exist" in caplog.text
+    assert caplog.text == snapshot


 def test_remote_ci_setup(config_yaml: ConfigYAML):
@@ -1019,7 +990,6 @@ def test_remote_ci_setup(config_yaml: ConfigYAML):
         )
         fp.write("conda_install_tool: conda\n")
     cfg = load_forge_config()
-    # pylief was quoted due to <
     assert cfg["remote_ci_setup"] == [
         "conda-forge-ci-setup=3",
         '"py-lief<0.12"',
@@ -1049,7 +1019,7 @@ def test_remote_ci_setup(config_yaml: ConfigYAML):


 @pytest.mark.parametrize(
-    "squished_input_variants,squished_used_variants,all_used_vars,expected_used_key_values",
+    "squished_input_variants,squished_used_variants,all_used_vars",
     [
         (
             dict(
@@ -1919,56 +1889,11 @@ def test_remote_ci_setup(config_yaml: ConfigYAML):
                 "target_platform",
                 "zip_keys",
             },
-            {
-                "c_compiler": ["gcc"],
-                "c_compiler_version": ["10", "12"],
-                "cdt_name": ["cos7", "cos6"],
-                "channel_sources": ["conda-forge"],
-                "channel_targets": ["conda-forge main"],
-                "cuda_compiler": ["nvcc", "None"],
-                "cuda_compiler_version": ["11.2", "None"],
-                "docker_image": [
-                    "quay.io/condaforge/linux-anvil-cuda:11.2",
-                    "quay.io/condaforge/linux-anvil-cos7-x86_64",
-                ],
-                "pin_run_as_build": dict(
-                    [
-                        ("python", {"max_pin": "x.x", "min_pin": "x.x"}),
-                        ("r-base", {"max_pin": "x.x", "min_pin": "x.x"}),
-                        ("flann", {"max_pin": "x.x.x"}),
-                        ("graphviz", {"max_pin": "x"}),
-                        ("libsvm", {"max_pin": "x"}),
-                        ("netcdf-cxx4", {"max_pin": "x.x"}),
-                        ("occt", {"max_pin": "x.x"}),
-                        ("poppler", {"max_pin": "x.x"}),
-                        ("vlfeat", {"max_pin": "x.x.x"}),
-                    ]
-                ),
-                "target_platform": ["linux-64"],
-                "zip_keys": [
-                    ("arrow_cpp", "libarrow", "libarrow_all"),
-                    (
-                        "c_compiler_version",
-                        "cxx_compiler_version",
-                        "fortran_compiler_version",
-                        "cuda_compiler",
-                        "cuda_compiler_version",
-                        "cdt_name",
-                        "docker_image",
-                    ),
-                    ("c_stdlib", "c_stdlib_version"),
-                    ("libgrpc", "libprotobuf"),
-                    ("python", "numpy", "python_impl"),
-                ],
-            },
         )
     ],
 )
 def test_get_used_key_values_by_input_order(
-    squished_input_variants,
-    squished_used_variants,
-    all_used_vars,
-    expected_used_key_values,
+    squished_input_variants, squished_used_variants, all_used_vars, snapshot
 ):
     used_key_values, _ = (
         configure_feedstock._get_used_key_values_by_input_order(
@@ -1977,7 +1902,9 @@ def test_get_used_key_values_by_input_order(
             all_used_vars,
         )
     )
-    assert used_key_values == expected_used_key_values
+    assert used_key_values == snapshot(
+        name="get_used_key_values_by_input_order"
+    )


 def test_conda_build_api_render_for_smithy(testing_workdir):
diff --git a/tests/test_feedstock_io.py b/tests/test_feedstock_io.py
index 4665fbc1e..c46916d70 100644
--- a/tests/test_feedstock_io.py
+++ b/tests/test_feedstock_io.py
@@ -7,7 +7,6 @@
 import string
 import shutil
 import tempfile
-import unittest

 import git
 from git.index.typ import BlobFilter
@@ -47,7 +46,7 @@ def parameterize():
                 shutil.rmtree(tmp_dir)


-class TestFeedstockIO(unittest.TestCase):
+class TestFeedstockIO:
     def setUp(self):
         self.old_dir = os.getcwd()

@@ -62,11 +61,9 @@ def setUp(self):
     def test_repo(self):
         for tmp_dir, repo, pathfunc in parameterize():
             if repo is None:
-                self.assertTrue(fio.get_repo(pathfunc(tmp_dir)) is None)
+                assert fio.get_repo(pathfunc(tmp_dir)) is None
             else:
-                self.assertIsInstance(
-                    fio.get_repo(pathfunc(tmp_dir)), git.Repo
-                )
+                assert isinstance(fio.get_repo(pathfunc(tmp_dir)), git.Repo)
                 possible_repo_subdir = os.path.join(
                     tmp_dir,
                     "".join(
@@ -97,12 +94,10 @@ def test_set_exe_file(self):
                 fio.set_exe_file(pathfunc(filename), set_exe)

                 file_mode = os.stat(filename).st_mode
-                self.assertEqual(file_mode & set_mode, int(set_exe) * set_mode)
+                assert file_mode & set_mode == int(set_exe) * set_mode
                 if repo is not None:
                     blob = next(repo.index.iter_blobs(BlobFilter(filename)))[1]
-                    self.assertEqual(
-                        blob.mode & set_mode, int(set_exe) * set_mode
-                    )
+                    assert blob.mode & set_mode == int(set_exe) * set_mode

     def test_write_file(self):
         for tmp_dir, repo, pathfunc in parameterize():
@@ -120,13 +115,13 @@ def test_write_file(self):
             with io.open(filename, "r", encoding="utf-8") as fh:
                 read_text = fh.read()

-            self.assertEqual(write_text, read_text)
+            assert write_text == read_text

             if repo is not None:
                 blob = next(repo.index.iter_blobs(BlobFilter(filename)))[1]
                 read_text = blob.data_stream[3].read().decode("utf-8")

-                self.assertEqual(write_text, read_text)
+                assert write_text == read_text

     def test_touch_file(self):
         for tmp_dir, repo, pathfunc in parameterize():
@@ -139,13 +134,13 @@ def test_touch_file(self):
             with io.open(filename, "r", encoding="utf-8") as fh:
                 read_text = fh.read()

-            self.assertEqual("", read_text)
+            assert "" == read_text

             if repo is not None:
                 blob = next(repo.index.iter_blobs(BlobFilter(filename)))[1]
                 read_text = blob.data_stream[3].read().decode("utf-8")

-                self.assertEqual("", read_text)
+                assert "" == read_text

     def test_remove_file(self):
         for tmp_dir, repo, pathfunc in parameterize():
@@ -163,24 +158,22 @@ def test_remove_file(self):
             if repo is not None:
                 repo.index.add([filename])

-            self.assertTrue(os.path.exists(filename))
+            assert os.path.exists(filename)
             if dirname:
-                self.assertTrue(os.path.exists(dirname))
-                self.assertTrue(os.path.exists(os.path.dirname(dirname)))
+                assert os.path.exists(dirname)
+                assert os.path.exists(os.path.dirname(dirname))
             if repo is not None:
-                self.assertTrue(
-                    list(repo.index.iter_blobs(BlobFilter(filename)))
-                )
+                assert list(repo.index.iter_blobs(BlobFilter(filename)))

             fio.remove_file(pathfunc(filename))

-            self.assertFalse(os.path.exists(filename))
+            assert not os.path.exists(filename)
             if dirname:
-                self.assertFalse(os.path.exists(dirname))
-                self.assertFalse(os.path.exists(os.path.dirname(dirname)))
+                assert not os.path.exists(dirname)
+                assert not os.path.exists(os.path.dirname(dirname))
             if repo is not None:
-                self.assertFalse(
-                    list(repo.index.iter_blobs(BlobFilter(filename)))
+                assert not list(
+                    repo.index.iter_blobs(BlobFilter(filename))
                 )

     def test_copy_file(self):
@@ -195,33 +188,29 @@ def test_copy_file(self):
             with io.open(filename1, "w", encoding="utf-8", newline="\n") as fh:
                 fh.write(write_text)

-            self.assertTrue(os.path.exists(filename1))
-            self.assertFalse(os.path.exists(filename2))
+            assert os.path.exists(filename1)
+            assert not os.path.exists(filename2)
             if repo is not None:
-                self.assertFalse(
-                    list(repo.index.iter_blobs(BlobFilter(filename2)))
-                )
+                assert not list(repo.index.iter_blobs(BlobFilter(filename2)))

             fio.copy_file(pathfunc(filename1), pathfunc(filename2))

-            self.assertTrue(os.path.exists(filename1))
-            self.assertTrue(os.path.exists(filename2))
+            assert os.path.exists(filename1)
+            assert os.path.exists(filename2)
             if repo is not None:
-                self.assertTrue(
-                    list(repo.index.iter_blobs(BlobFilter(filename2)))
-                )
+                assert list(repo.index.iter_blobs(BlobFilter(filename2)))

             read_text = ""
             with io.open(filename2, "r", encoding="utf-8") as fh:
                 read_text = fh.read()

-            self.assertEqual(write_text, read_text)
+            assert write_text == read_text

             if repo is not None:
                 blob = next(repo.index.iter_blobs(BlobFilter(filename2)))[1]
                 read_text = blob.data_stream[3].read().decode("utf-8")

-                self.assertEqual(write_text, read_text)
+                assert write_text == read_text

     def tearDown(self):
         os.chdir(self.old_dir)
@@ -229,7 +218,3 @@ def tearDown(self):
         shutil.rmtree(self.tmp_dir)

         del self.tmp_dir
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/test_feedstock_tokens.py b/tests/test_feedstock_tokens.py
index 645cacf47..44e9e0b5f 100644
--- a/tests/test_feedstock_tokens.py
+++ b/tests/test_feedstock_tokens.py
@@ -1,6 +1,5 @@
 import os
 import json
-from unittest import mock
 import time

 import pytest
@@ -49,13 +48,8 @@
 @pytest.mark.parametrize(
     "repo", ["GITHUB_TOKEN", "${GITHUB_TOKEN}", "GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_feedstock_tokens_roundtrip(
-    gh_mock,
-    git_mock,
-    tmp_mock,
+    mocker,
     tmpdir,
     repo,
     project,
@@ -65,6 +59,10 @@ def test_feedstock_tokens_roundtrip(
     expires_at,
     retval_time,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -115,18 +113,17 @@ def test_feedstock_tokens_roundtrip(
 @pytest.mark.parametrize(
     "repo", ["GITHUB_TOKEN", "${GITHUB_TOKEN}", "GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_is_valid_feedstock_token_nofile(
-    gh_mock,
-    git_mock,
-    tmp_mock,
+    mocker,
     tmpdir,
     repo,
     project,
     ci,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -158,13 +155,8 @@ def test_is_valid_feedstock_token_nofile(
 @pytest.mark.parametrize(
     "repo", ["GITHUB_TOKEN", "${GITHUB_TOKEN}", "GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_is_valid_feedstock_token_badtoken(
-    gh_mock,
-    git_mock,
-    tmp_mock,
+    mocker,
    tmpdir,
    repo,
    project,
@@ -172,6 +164,9 @@ def test_is_valid_feedstock_token_badtoken(
     provider,
     ci,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -301,13 +296,8 @@ def test_read_feedstock_token(ci):
 @pytest.mark.parametrize(
     "repo", ["$GITHUB_TOKEN", "${GITHUB_TOKEN}", "$GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_feedstock_token_exists(
-    gh_mock,
-    git_mock,
-    tmp_mock,
+    mocker,
     tmpdir,
     repo,
     project,
@@ -318,6 +308,10 @@ def test_feedstock_token_exists(
     expires_at,
     retval_time,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -352,12 +346,11 @@
 @pytest.mark.parametrize(
     "repo", ["$GITHUB_TOKEN", "${GITHUB_TOKEN}", "$GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
-def test_feedstock_token_raises(
-    gh_mock, git_mock, tmp_mock, tmpdir, repo, project, ci
-):
+def test_feedstock_token_raises(mocker, tmpdir, repo, project, ci, snapshot):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -371,8 +364,7 @@ def test_feedstock_token_raises(

     with pytest.raises(FeedstockTokenError) as e:
         feedstock_token_exists(user, project, repo, provider=ci)
-
-    assert "Testing for the feedstock token for" in str(e.value)
+    assert e.value == snapshot

     git_mock.Repo.clone_from.assert_called_once_with(
         "abc123",
@@ -385,21 +377,13 @@ def test_feedstock_token_raises(
 @pytest.mark.parametrize(
     "repo", ["$GITHUB_TOKEN", "${GITHUB_TOKEN}", "$GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.secrets")
-@mock.patch("conda_smithy.feedstock_tokens.os.urandom")
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
-def test_register_feedstock_token_works(
-    gh_mock,
-    git_mock,
-    tmp_mock,
-    osuran_mock,
-    secrets_mock,
-    tmpdir,
-    repo,
-    ci,
-):
+def test_register_feedstock_token_works(mocker, tmpdir, repo, ci):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+    secrets_mock = mocker.patch("conda_smithy.feedstock_tokens.secrets")
+    osuran_mock = mocker.patch("conda_smithy.feedstock_tokens.os.urandom")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -457,21 +441,18 @@ def test_register_feedstock_token_works(
 @pytest.mark.parametrize(
     "repo", ["$GITHUB_TOKEN", "${GITHUB_TOKEN}", "$GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.secrets")
-@mock.patch("conda_smithy.feedstock_tokens.os.urandom")
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_register_feedstock_token_notoken(
-    gh_mock,
-    git_mock,
-    tmp_mock,
-    osuran_mock,
-    secrets_mock,
+    mocker,
     tmpdir,
     repo,
     ci,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+    secrets_mock = mocker.patch("conda_smithy.feedstock_tokens.secrets")
+    osuran_mock = mocker.patch("conda_smithy.feedstock_tokens.os.urandom")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -505,7 +486,6 @@ def test_register_feedstock_token_notoken(
         repo.remote.return_value.push.assert_not_called()

     assert not os.path.exists(token_json_pth)
-    assert "No token found in" in str(e.value)


@@ -513,21 +493,18 @@ def test_register_feedstock_token_notoken(
 @pytest.mark.parametrize(
     "repo", ["$GITHUB_TOKEN", "${GITHUB_TOKEN}", "$GH_TOKEN", "${GH_TOKEN}"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.secrets")
-@mock.patch("conda_smithy.feedstock_tokens.os.urandom")
-@mock.patch("conda_smithy.feedstock_tokens.tempfile")
-@mock.patch("conda_smithy.feedstock_tokens.git")
-@mock.patch("conda_smithy.github.gh_token")
 def test_register_feedstock_token_append(
-    gh_mock,
-    git_mock,
-    tmp_mock,
-    osuran_mock,
-    secrets_mock,
+    mocker,
     tmpdir,
     repo,
     ci,
 ):
+    gh_mock = mocker.patch("conda_smithy.github.gh_token")
+    tmp_mock = mocker.patch("conda_smithy.feedstock_tokens.tempfile")
+    git_mock = mocker.patch("conda_smithy.feedstock_tokens.git")
+    secrets_mock = mocker.patch("conda_smithy.feedstock_tokens.secrets")
+    osuran_mock = mocker.patch("conda_smithy.feedstock_tokens.os.urandom")
+
     gh_mock.return_value = "abc123"
     tmp_mock.TemporaryDirectory.return_value.__enter__.return_value = str(
         tmpdir
@@ -588,19 +565,8 @@ def test_register_feedstock_token_append(
 @pytest.mark.parametrize("travis", [True, False])
 @pytest.mark.parametrize("github_actions", [True, False])
 @pytest.mark.parametrize("clobber", [True, False])
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_drone")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_circle")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_travis")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_azure")
-@mock.patch(
-    "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
-)
 def test_register_feedstock_token_with_providers(
-    github_actions_mock,
-    azure_mock,
-    travis_mock,
-    circle_mock,
-    drone_mock,
+    mocker,
     drone,
     circle,
     azure,
@@ -609,6 +575,22 @@ def test_register_feedstock_token_with_providers(
     clobber,
     unique_token_per_provider,
 ):
+    github_actions_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
+    )
+    azure_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_azure"
+    )
+    travis_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_travis"
+    )
+    circle_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_circle"
+    )
+    drone_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_drone"
+    )
+
     user = "foo"
     project = "bar"
     providers = [
@@ -726,19 +708,8 @@ def test_register_feedstock_token_with_providers(
 @pytest.mark.parametrize("travis", [True, False])
 @pytest.mark.parametrize("github_actions", [True, False])
 @pytest.mark.parametrize("clobber", [True, False])
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_drone")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_circle")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_travis")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_azure")
-@mock.patch(
-    "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
-)
 def test_register_feedstock_token_with_providers_notoken(
-    github_actions_mock,
-    azure_mock,
-    travis_mock,
-    circle_mock,
-    drone_mock,
+    mocker,
     drone,
     circle,
     azure,
@@ -747,6 +718,22 @@ def test_register_feedstock_token_with_providers_notoken(
     clobber,
     unique_token_per_provider,
 ):
+    github_actions_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
+    )
+    azure_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_azure"
+    )
+    travis_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_travis"
+    )
+    circle_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_circle"
+    )
+    drone_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_drone"
+    )
+
     user = "foo"
     project = "bar"

@@ -777,22 +764,27 @@ def test_register_feedstock_token_with_providers_notoken(
 @pytest.mark.parametrize(
     "provider", ["drone", "circle", "travis", "azure", "github_actions"]
 )
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_drone")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_circle")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_travis")
-@mock.patch("conda_smithy.feedstock_tokens.add_feedstock_token_to_azure")
-@mock.patch(
-    "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
-)
 def test_register_feedstock_token_with_providers_error(
-    github_actions_mock,
-    azure_mock,
-    travis_mock,
-    circle_mock,
-    drone_mock,
+    mocker,
     provider,
     unique_token_per_provider,
 ):
+    github_actions_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_github_actions"
+    )
+    azure_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_azure"
+    )
+    travis_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_travis"
+    )
+    circle_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_circle"
+    )
+    drone_mock = mocker.patch(
+        "conda_smithy.feedstock_tokens.add_feedstock_token_to_drone"
+    )
+
     user = "foo"
     project = "bar-feedstock"
     providers = [
diff --git a/tests/test_lint_recipe.py b/tests/test_lint_recipe.py
index b398320bb..c21292ac7 100644
--- a/tests/test_lint_recipe.py
+++ b/tests/test_lint_recipe.py
@@ -8,7 +8,6 @@
 import subprocess
 import tempfile
 import textwrap
-import unittest
 import warnings

 import github
@@ -34,9 +33,7 @@ def tmp_directory():
     "comp_lang",
     ["c", "cxx", "fortran", "rust", "m2w64_c", "m2w64_cxx", "m2w64_fortran"],
 )
-def test_stdlib_hint(comp_lang):
-    expected_message = "This recipe is using a compiler"
-
+def test_stdlib_hint(comp_lang, snapshot):
     with tmp_directory() as recipe_dir:
         with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
             fh.write(
@@ -51,12 +48,10 @@ def test_stdlib_hint(comp_lang):
             )

     _, hints = linter.main(recipe_dir, return_hints=True)
-    assert any(h.startswith(expected_message) for h in hints)
-
+    assert hints == snapshot()

-def test_sysroot_hint():
-    expected_message = "You're setting a requirement on sysroot"

+def test_sysroot_hint(snapshot):
     with tmp_directory() as recipe_dir:
         with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
             fh.write(
@@ -70,13 +65,11 @@ def test_sysroot_hint(snapshot):
             )

     _, hints = linter.main(recipe_dir, return_hints=True)
-    assert any(h.startswith(expected_message) for h in hints)
+    assert hints == snapshot


 @pytest.mark.parametrize("where", ["run", "run_constrained"])
-def test_osx_hint(where):
-    expected_message = "You're setting a constraint on the `__osx` virtual"
-
+def test_osx_hint(where, snapshot):
     with tmp_directory() as recipe_dir:
         with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
             fh.write(
@@ -91,12 +84,10 @@ def test_osx_hint(where):
             )

     _, hints = linter.main(recipe_dir, return_hints=True)
-    assert any(h.startswith(expected_message) for h in hints)
-
+    assert hints == snapshot

-def test_stdlib_hints_multi_output():
-    expected_message = "You're setting a requirement on sysroot"

+def test_stdlib_hints_multi_output(snapshot):
     with tmp_directory() as recipe_dir:
         with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
             fh.write(
@@ -132,20 +123,13 @@ def test_stdlib_hints_multi_output(snapshot):
             )

     _, hints = linter.main(recipe_dir, return_hints=True)
-    exp_stdlib = "This recipe is using a compiler"
-    exp_sysroot = "You're setting a requirement on sysroot"
-    exp_osx = "You're setting a constraint on the `__osx`"
-    assert any(h.startswith(exp_stdlib) for h in hints)
-    assert any(h.startswith(exp_sysroot) for h in hints)
-    assert any(h.startswith(exp_osx) for h in hints)
+    assert hints == snapshot


 @pytest.mark.parametrize("where", ["run", "run_constrained"])
-def test_osx_noarch_hint(where):
+def test_osx_noarch_hint(where, snapshot):
     # don't warn on packages that are using __osx as a noarch-marker, see
     # https://conda-forge.org/docs/maintainer/knowledge_base/#noarch-packages-with-os-specific-dependencies
-    avoid_message = "You're setting a constraint on the `__osx` virtual"
-
     with tmp_directory() as recipe_dir:
         with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
             fh.write(
@@ -159,7 +143,7 @@ def test_osx_noarch_hint(where):
             )

     _, hints = linter.main(recipe_dir, return_hints=True)
-    assert not any(h.startswith(avoid_message) for h in hints)
+    assert hints == snapshot


 @pytest.mark.parametrize(
@@ -266,7 +250,7 @@ def test_cbc_osx_hints(
     assert any(h.startswith(exp_hint) for h in hints)


-class Test_linter(unittest.TestCase):
+class Test_linter:
     def test_pin_compatible_in_run_exports(self):
         meta = {
             "package": {
@@ -276,9 +260,9 @@ def test_pin_compatible_in_run_exports(self):
                 "run_exports": ["compatible_pin apackage"],
             },
         }
-        lints, hints = linter.lintify_meta_yaml(meta)
+        lints, _ = linter.lintify_meta_yaml(meta)
         expected = "pin_subpackage should be used instead"
-        self.assertTrue(any(lint.startswith(expected) for lint in lints))
+        assert any(lint.startswith(expected) for lint in lints)

     def test_pin_compatible_in_run_exports_output(self):
         meta = {
@@ -294,15 +278,15 @@ def test_pin_compatible_in_run_exports_output(self):
                 }
             ],
         }
-        lints, hints = linter.lintify_meta_yaml(meta)
+        lints, _ = linter.lintify_meta_yaml(meta)
         expected = "pin_compatible should be used instead"
-        self.assertTrue(any(lint.startswith(expected) for lint in lints))
+        assert any(lint.startswith(expected) for lint in lints)

     def test_bad_top_level(self):
         meta = OrderedDict([["package", {}], ["build", {}], ["sources", {}]])
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_msg = "The top level meta key sources is unexpected"
-        self.assertIn(expected_msg, lints)
+        assert expected_msg in lints

     def test_bad_order(self):
         meta = OrderedDict([["package", {}], ["build", {}], ["source", {}]])
@@ -311,16 +295,16 @@ def test_bad_order(self):
             "The top level meta keys are in an unexpected "
             "order. Expecting ['package', 'source', 'build']."
         )
-        self.assertIn(expected_msg, lints)
+        assert expected_msg in lints

     def test_missing_about_license_and_summary(self):
         meta = {"about": {"home": "a URL"}}
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "The license item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         expected_message = "The summary item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_bad_about_license(self):
         meta = {
@@ -332,7 +316,7 @@ def test_bad_about_license(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "The recipe license cannot be unknown."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_bad_about_license_family(self):
         meta = {
@@ -345,31 +329,31 @@ def test_bad_about_license_family(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta)
         expected = "about/license_family 'BSD3' not allowed"
-        self.assertTrue(any(lint.startswith(expected) for lint in lints))
+        assert any(lint.startswith(expected) for lint in lints)

     def test_missing_about_home(self):
         meta = {"about": {"license": "BSD", "summary": "A test summary"}}
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "The home item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_missing_about_home_empty(self):
         meta = {"about": {"home": "", "summary": "", "license": ""}}
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "The home item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         expected_message = "The license item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         expected_message = "The summary item is expected in the about section."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_noarch_value(self):
         meta = {"build": {"noarch": "true"}}
         expected = "Invalid `noarch` value `true`. Should be one of"
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertTrue(any(lint.startswith(expected) for lint in lints))
+        assert any(lint.startswith(expected) for lint in lints)

     def test_maintainers_section(self):
         expected_message = (
@@ -380,16 +364,16 @@ def test_maintainers_section(self):
         lints, hints = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": []}}
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         # No extra section at all.
         lints, hints = linter.lintify_meta_yaml({})
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": ["a"]}}
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         expected_message = (
             'The "extra" section was expected to be a '
@@ -398,38 +382,38 @@ def test_maintainers_section(self):
         lints, hints = linter.lintify_meta_yaml(
             {"extra": ["recipe-maintainers"]}
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": "Luke"}}
         )
         expected_message = "Recipe maintainers should be a json list."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_test_section(self):
         expected_message = "The recipe must have some tests."
         lints, hints = linter.lintify_meta_yaml({})
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml({"test": {"files": "foo"}})
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml({"test": {"imports": "sys"}})
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         lints, hints = linter.lintify_meta_yaml({"outputs": [{"name": "foo"}]})
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml(
             {"outputs": [{"name": "foo", "test": {"files": "foo"}}]}
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml(
             {"outputs": [{"name": "foo", "test": {"imports": "sys"}}]}
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         lints, hints = linter.lintify_meta_yaml(
             {
@@ -439,9 +423,10 @@ def test_test_section(self):
                 ]
             }
         )
-        self.assertNotIn(expected_message, lints)
-        self.assertIn(
-            "It looks like the 'foobar' output doesn't have any tests.", hints
+        assert expected_message not in lints
+        assert (
+            "It looks like the 'foobar' output doesn't have any tests."
+            in hints
         )

         lints, hints = linter.lintify_meta_yaml(
@@ -452,9 +437,10 @@ def test_test_section(self):
                 ]
             }
         )
-        self.assertNotIn(expected_message, lints)
-        self.assertIn(
-            "It looks like the 'foobar' output doesn't have any tests.", hints
+        assert expected_message not in lints
+        assert (
+            "It looks like the 'foobar' output doesn't have any tests."
+            in hints
         )

     def test_test_section_with_recipe(self):
@@ -465,12 +451,12 @@ def test_test_section_with_recipe(self):
         with tmp_directory() as recipe_dir:
             lints, hints = linter.lintify_meta_yaml({}, recipe_dir)
-            self.assertIn(expected_message, lints)
+            assert expected_message in lints

             with io.open(os.path.join(recipe_dir, "run_test.py"), "w") as fh:
                 fh.write("# foo")

             lints, hints = linter.lintify_meta_yaml({}, recipe_dir)
-            self.assertNotIn(expected_message, lints)
+            assert expected_message not in lints

     def test_jinja2_vars(self):
         expected_message = (
@@ -499,7 +485,7 @@ def test_jinja2_vars(self):
             )

             _, hints = linter.lintify_meta_yaml({}, recipe_dir)
-            self.assertTrue(any(h.startswith(expected_message) for h in hints))
+            assert any(h.startswith(expected_message) for h in hints)

     def test_selectors(self):
         expected_message = (
@@ -532,11 +518,9 @@ def assert_selector(selector, is_good=True):
                     "Expecting lints for '{}', but didn't get any."
                     "".format(selector)
                 )
-            self.assertEqual(
-                not is_good,
-                any(lint.startswith(expected_message) for lint in lints),
-                message,
-            )
+            assert (not is_good) == any(
+                lint.startswith(expected_message) for lint in lints
+            ), message

         assert_selector("name: foo_py3  # [py3k]")
         assert_selector("name: foo_py3 [py3k]", is_good=False)
@@ -565,14 +549,9 @@ def assert_pyXY_selector(meta_string, is_good=False, kind="lint"):
                 "Expected lints or hints for '{}', but didn't get any."
             ).format(meta_string)
             problems = lints if kind == "lint" else hints
-            self.assertEqual(
-                not is_good,
-                any(
-                    problem.startswith(expected_start)
-                    for problem in problems
-                ),
-                message,
-            )
+            assert (not is_good) == any(
+                problem.startswith(expected_start) for problem in problems
+            ), message

         assert_pyXY_selector(
             """
@@ -674,11 +653,10 @@ def assert_noarch_selector(meta_string, is_good=False):
             message = (
                 "Expected lints for '{}', but didn't " "get any."
             ).format(meta_string)
-            self.assertEqual(
-                not is_good,
-                any(lint.startswith(expected_start) for lint in lints),
-                message,
-            )
+
+            assert (not is_good) == any(
+                lint.startswith(expected_start) for lint in lints
+            ), message

         assert_noarch_selector(
             """
@@ -822,11 +800,10 @@ def assert_noarch_hint(meta_string, is_good=False):
             message = (
                 "Expected hints for '{}', but didn't " "get any."
             ).format(meta_string)
-            self.assertEqual(
-                not is_good,
-                any(lint.startswith(expected_start) for lint in hints),
-                message,
-            )
+
+            assert (not is_good) == any(
+                lint.startswith(expected_start) for lint in hints
+            ), message

         assert_noarch_hint(
             """
@@ -1007,11 +984,11 @@ def test_missing_build_number(self):
             }
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         meta = {"build": {"skip": "True", "script": "python setup.py install"}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_bad_requirements_order(self):
         expected_message = (
@@ -1024,7 +1001,7 @@ def test_bad_requirements_order(self):
             "requirements": OrderedDict([["run", ["a"]], ["build", ["a"]]])
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         meta = {
             "requirements": OrderedDict(
@@ -1032,13 +1009,13 @@ def test_bad_requirements_order(self):
             )
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         meta = {
             "requirements": OrderedDict([["build", ["a"]], ["run", ["a"]]])
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

     def test_noarch_python_bound(self):
         expected_message = (
@@ -1059,7 +1036,7 @@ def test_noarch_python_bound(self):
             },
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         meta = {
             "build": {"noarch": "python"},
@@ -1073,7 +1050,7 @@ def test_noarch_python_bound(self):
             },
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         meta = {
             "build": {"noarch": "generic"},
@@ -1087,7 +1064,7 @@ def test_noarch_python_bound(self):
             },
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

     def test_no_sha_with_dl(self):
         expected_message = (
@@ -1095,20 +1072,20 @@ def test_no_sha_with_dl(self):
             "sha1 or md5 checksum (sha256 preferably)."
         )
         lints, hints = linter.lintify_meta_yaml({"source": {"url": None}})
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, hints = linter.lintify_meta_yaml(
             {"source": {"url": None, "sha1": None}}
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         lints, hints = linter.lintify_meta_yaml(
             {"source": {"url": None, "sha256": None}}
         )
-        self.assertNotIn(expected_message, lints, hints)
+        assert expected_message not in lints, hints

         meta = {"source": {"url": None, "md5": None}}
-        self.assertNotIn(expected_message, linter.lintify_meta_yaml(meta))
+        assert expected_message not in linter.lintify_meta_yaml(meta)

     def test_redundant_license(self):
         meta = {
@@ -1122,7 +1099,7 @@ def test_redundant_license(self):
         expected_message = (
             "The recipe `license` should not include " 'the word "License".'
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_spdx_license(self):
         msg = (
@@ -1147,9 +1124,9 @@ def test_spdx_license(self):
             lints, hints = linter.lintify_meta_yaml(meta)
             print(license, good)
             if good:
-                self.assertNotIn(msg, hints)
+                assert msg not in hints
             else:
-                self.assertIn(msg, hints)
+                assert msg in hints

     def test_spdx_license_exception(self):
         msg = (
@@ -1165,9 +1142,9 @@ def test_spdx_license_exception(self):
             meta = {"about": {"license": license}}
             lints, hints = linter.lintify_meta_yaml(meta)
             if good:
-                self.assertNotIn(msg, hints)
+                assert msg not in hints
             else:
-                self.assertIn(msg, hints)
+                assert msg in hints

     def test_license_file_required(self):
         meta = {
@@ -1179,7 +1156,7 @@ def test_license_file_required(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "license_file entry is missing, but is required."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_license_file_empty(self):
         meta = {
@@ -1193,7 +1170,7 @@ def test_license_file_empty(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta)
         expected_message = "license_file entry is missing, but is required."
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_recipe_name(self):
         meta = {"package": {"name": "mp++"}}
@@ -1202,7 +1179,7 @@ def test_recipe_name(self):
             "Recipe name has invalid characters. only lowercase alpha, "
             "numeric, underscores, hyphens and dots allowed"
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

     def test_end_empty_line(self):
         bad_contents = [
@@ -1245,9 +1222,9 @@ def test_end_empty_line(self):
                 " the end of the file."
             )
             if content == valid_content:
-                self.assertNotIn(expected_message, lints)
+                assert expected_message not in lints
             else:
-                self.assertIn(expected_message, lints)
+                assert expected_message in lints

     def test_cb3_jinja2_functions(self):
         lints = linter.main(
@@ -1255,19 +1232,19 @@ def test_cb3_jinja2_functions(self):
         )
         assert not lints

-    @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+    @pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
     def test_maintainer_exists(self):
         lints, _ = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": ["support"]}}, conda_forge=True
         )
         expected_message = 'Recipe maintainer "support" does not exist'
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, _ = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": ["isuruf"]}}, conda_forge=True
         )
         expected_message = 'Recipe maintainer "isuruf" does not exist'
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         expected_message = (
             "Feedstock with the same name exists in conda-forge."
@@ -1278,26 +1255,26 @@ def test_maintainer_exists(self):
             recipe_dir="python",
             conda_forge=True,
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": "python"}},
             recipe_dir="python",
             conda_forge=False,
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         # No lint if in a feedstock
         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": "python"}},
             recipe_dir="recipe",
             conda_forge=True,
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": "python"}},
             recipe_dir="recipe",
             conda_forge=False,
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         # Make sure there's no feedstock named python1 before proceeding
         gh = github.Github(os.environ["GH_TOKEN"])
@@ -1318,7 +1295,7 @@ def test_maintainer_exists(self):
             recipe_dir="python",
             conda_forge=True,
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         # Test bioconda recipe checking
         expected_message = (
@@ -1340,27 +1317,27 @@ def test_maintainer_exists(self):
         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": r}}, recipe_dir=r, conda_forge=True
         )
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": r}}, recipe_dir=r, conda_forge=False
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         # No lint if in a feedstock
         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": r}}, recipe_dir="recipe", conda_forge=True
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         lints, _ = linter.lintify_meta_yaml(
             {"package": {"name": r}},
             recipe_dir="recipe",
             conda_forge=False,
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         # No lint if the name isn't specified
         lints, _ = linter.lintify_meta_yaml(
             {}, recipe_dir=r, conda_forge=True
         )
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         r = "this-will-never-exist"
         try:
@@ -1369,7 +1346,7 @@ def test_maintainer_exists(self):
             lints, _ = linter.lintify_meta_yaml(
                 {"package": {"name": r}}, recipe_dir=r, conda_forge=True
             )
-            self.assertNotIn(expected_message, lints)
+            assert expected_message not in lints
         else:
             warnings.warn(
                 "There's a bioconda recipe named {}, but tests assume that there isn't".format(
@@ -1390,7 +1367,7 @@ def test_maintainer_exists(self):
             recipe_dir="recipes/foo",
             conda_forge=True,
         )
-        self.assertIn(expected_message, hints)
+        assert expected_message in hints

         # check that this doesn't choke
         lints, hints = linter.lintify_meta_yaml(
@@ -1406,7 +1383,7 @@ def test_maintainer_exists(self):
             conda_forge=True,
         )

-    @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+    @pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
     def test_maintainer_participation(self):
         # Mocking PR and maintainer data
         os.environ["STAGED_RECIPES_PR_NUMBER"] = "1"  # Example PR number
@@ -1425,13 +1402,13 @@ def test_maintainer_participation(self):
                 "The following maintainers have not yet confirmed that they are willing to be listed here: "
                 "isuruf. Please ask them to comment on this PR if they are."
             )
-            self.assertIn(expected_message, lints)
+            assert expected_message in lints

             expected_message = (
                 "The following maintainers have not yet confirmed that they are willing to be listed here: "
                 "pelson, isuruf. Please ask them to comment on this PR if they are."
             )
-            self.assertNotIn(expected_message, lints)
+            assert expected_message not in lints
         finally:
             del os.environ["STAGED_RECIPES_PR_NUMBER"]

@@ -1449,7 +1426,7 @@ def test_bad_subheader(self):
             }
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message.format("build", "ski"), lints)
+        assert expected_message.format("build", "ski") not in lints

         meta = {
             "build": {
@@ -1459,15 +1436,15 @@ def test_bad_subheader(self):
             }
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message.format("build", "ski"), lints)
+        assert expected_message.format("build", "ski") in lints

         meta = {"source": {"urll": "http://test"}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message.format("source", "urll"), lints)
+        assert expected_message.format("source", "urll") in lints

         meta = {"source": [{"urll": "http://test"}, {"url": "https://test"}]}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message.format("source", "urll"), lints)
+        assert expected_message.format("source", "urll") in lints

     def test_outputs(self):
         meta = OrderedDict([["outputs", [{"name": "asd"}]]])
@@ -1477,16 +1454,16 @@ def test_version(self):
         meta = {"package": {"name": "python", "version": "3.6.4"}}
         expected_message = "Package version 3.6.4 doesn't match conda spec"
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(expected_message, lints)
+        assert expected_message not in lints

         meta = {"package": {"name": "python", "version": "2.0.0~alpha0"}}
         expected_message = (
             "Package version 2.0.0~alpha0 doesn't match conda spec"
         )
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(expected_message, lints)
+        assert expected_message in lints

-    @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+    @pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
     def test_examples(self):
         msg = (
             "Please move the recipe out of the example dir and into its "
@@ -1497,13 +1474,13 @@ def test_examples(self):
             recipe_dir="recipes/example/",
             conda_forge=True,
         )
-        self.assertIn(msg, lints)
+        assert msg in lints

         lints = linter.lintify_meta_yaml(
             {"extra": {"recipe-maintainers": ["support"]}},
             recipe_dir="python",
             conda_forge=True,
         )
-        self.assertNotIn(msg, lints)
+        assert msg not in lints

     def test_multiple_sources(self):
         lints = linter.main(
@@ -1530,7 +1507,7 @@ def test_string_source(self):
             'The "source" section was expected to be a dictionary or a '
             "list, but got a {}.{}."
         ).format(type(url).__module__, type(url).__name__)
-        self.assertIn(msg, lints)
+        assert msg in lints

     def test_single_space_pins(self):
         meta = {
@@ -1554,7 +1531,7 @@ def test_single_space_pins(self):
             "``requirements: run: conda-smithy<=54.*`` must contain a space "
             "between the name and the pin, i.e. ``conda-smithy <=54.*``",
         ]
-        self.assertEqual(expected_messages, filtered_lints)
+        assert expected_messages == filtered_lints

     def test_empty_host(self):
         meta = {"requirements": {"build": None, "host": None, "run": None}}
@@ -1564,9 +1541,9 @@ def test_python_requirements(self):
         meta = {"requirements": {"host": ["python >=3"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(
-            "If python is a host requirement, it should be a run requirement.",
-            lints,
+        assert (
+            "If python is a host requirement, it should be a run requirement."
+            in lints
         )

         meta = {
@@ -1574,23 +1551,23 @@ def test_python_requirements(self):
             "outputs": [{"name": "foo"}],
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(
-            "If python is a host requirement, it should be a run requirement.",
-            lints,
+        assert (
+            "If python is a host requirement, it should be a run requirement."
+            not in lints
         )

         meta = {"requirements": {"host": ["python >=3", "python"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(
-            "Non noarch packages should have python requirement without any version constraints.",
-            lints,
+        assert (
+            "Non noarch packages should have python requirement without any version constraints."
+            not in lints
         )

         meta = {"requirements": {"host": ["python >=3"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(
-            "Non noarch packages should have python requirement without any version constraints.",
-            lints,
+        assert (
+            "Non noarch packages should have python requirement without any version constraints."
+            in lints
         )

         meta = {
@@ -1602,9 +1579,9 @@ def test_python_requirements(self):

     def test_r_base_requirements(self):
         meta = {"requirements": {"host": ["r-base >=3.5"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(
-            "If r-base is a host requirement, it should be a run requirement.",
-            lints,
+        assert (
+            "If r-base is a host requirement, it should be a run requirement."
+            in lints
         )

         meta = {
@@ -1612,23 +1589,23 @@ def test_r_base_requirements(self):
             "outputs": [{"name": "foo"}],
         }
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(
-            "If r-base is a host requirement, it should be a run requirement.",
-            lints,
+        assert (
+            "If r-base is a host requirement, it should be a run requirement."
+            not in lints
         )

         meta = {"requirements": {"host": ["r-base >=3.5", "r-base"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertNotIn(
-            "Non noarch packages should have r-base requirement without any version constraints.",
-            lints,
+        assert (
+            "Non noarch packages should have r-base requirement without any version constraints."
+            not in lints
         )

         meta = {"requirements": {"host": ["r-base >=3.5"]}}
         lints, hints = linter.lintify_meta_yaml(meta)
-        self.assertIn(
-            "Non noarch packages should have r-base requirement without any version constraints.",
-            lints,
+        assert (
+            "Non noarch packages should have r-base requirement without any version constraints."
+            in lints
         )

     @pytest.mark.skipif(
@@ -1644,7 +1621,7 @@ def test_build_sh_with_shellcheck_findings(self):
         )
         assert len(hints) < 100

-    @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+    @pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
     def test_mpl_base_hint(self):
         meta = {
             "requirements": {
@@ -1653,9 +1630,9 @@ def test_mpl_base_hint(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta, conda_forge=True)
         expected = "Recipes should usually depend on `matplotlib-base`"
-        self.assertTrue(any(hint.startswith(expected) for hint in hints))
+        assert any(hint.startswith(expected) for hint in hints)

-    @unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+    @pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
     def test_mpl_base_hint_outputs(self):
         meta = {
             "outputs": [
@@ -1668,7 +1645,7 @@ def test_mpl_base_hint_outputs(self):
         }
         lints, hints = linter.lintify_meta_yaml(meta, conda_forge=True)
         expected = "Recipes should usually depend on `matplotlib-base`"
-        self.assertTrue(any(hint.startswith(expected) for hint in hints))
+        assert any(hint.startswith(expected) for hint in hints)

     def test_rust_license_bundling(self):
         # Case where cargo-bundle-licenses is missing
@@ -1681,7 +1658,7 @@ def test_rust_license_bundling(self):
             "Rust packages must include the licenses of the Rust dependencies. "
             "For more info, visit: https://conda-forge.org/docs/maintainer/adding_pkgs/#rust"
         )
-        self.assertIn(expected_msg, lints)
+        assert expected_msg in lints

         # Case where cargo-bundle-licenses is present
         meta_with_license = {
@@ -1691,7 +1668,7 @@ def test_rust_license_bundling(self):
         }

         lints, hints = linter.lintify_meta_yaml(meta_with_license)
-        self.assertNotIn(expected_msg, lints)
+        assert expected_msg not in lints

     def test_go_license_bundling(self):
         # Case where go-licenses is missing
@@ -1704,7 +1681,7 @@ def test_go_license_bundling(self):
             "Go packages must include the licenses of the Go dependencies. "
             "For more info, visit: https://conda-forge.org/docs/maintainer/adding_pkgs/#go"
         )
-        self.assertIn(expected_msg, lints)
+        assert expected_msg in lints

         # Case where go-licenses is present
         meta_with_license = {
@@ -1712,11 +1689,11 @@ def test_go_license_bundling(self):
         }

         lints, hints = linter.lintify_meta_yaml(meta_with_license)
-        self.assertNotIn(expected_msg, lints)
+        assert expected_msg not in lints


 @pytest.mark.cli
-class TestCLI_recipe_lint(unittest.TestCase):
+class TestCLI_recipe_lint:
     def test_cli_fail(self):
         with tmp_directory() as recipe_dir:
             with io.open(os.path.join(recipe_dir, "meta.yaml"), "w") as fh:
@@ -1735,7 +1712,7 @@ def test_cli_fail(self):
                 stdout=subprocess.PIPE,
             )
             out, _ = child.communicate()
-            self.assertEqual(child.returncode, 1, out)
+            assert child.returncode == 1, out

     def test_cli_success(self):
         with tmp_directory() as recipe_dir:
@@ -1766,7 +1743,7 @@ def test_cli_success(self):
                 stdout=subprocess.PIPE,
             )
             out, _ = child.communicate()
-            self.assertEqual(child.returncode, 0, out)
+            assert child.returncode == 0, out

     def test_cli_environ(self):
         with tmp_directory() as recipe_dir:
@@ -1799,7 +1776,7 @@ def test_cli_environ(self):
                 stdout=subprocess.PIPE,
             )
             out, _ = child.communicate()
-            self.assertEqual(child.returncode, 0, out)
+            assert child.returncode == 0, out

     def test_unicode(self):
         """
@@ -1857,11 +1834,9 @@ def assert_jinja(jinja_var, is_good=True):
                 "Expecting lints for '{}', but didn't get any."
                 "".format(jinja_var)
             )
-            self.assertEqual(
-                not is_good,
-                any(lint.startswith(expected_message) for lint in lints),
-                message,
-            )
+            assert (not is_good) == any(
+                lint.startswith(expected_message) for lint in lints
+            ), message

         assert_jinja('{% set version = "0.27.3" %}')
         assert_jinja('{% set version="0.27.3" %}', is_good=False)
@@ -1870,7 +1845,7 @@ def assert_jinja(jinja_var, is_good=True):
         assert_jinja('{% set version= "0.27.3"%}', is_good=False)


-@unittest.skipUnless(is_gh_token_set(), "GH_TOKEN not set")
+@pytest.mark.skipif(not is_gh_token_set(), reason="GH_TOKEN not set")
 def test_lint_no_builds():
     expected_message = "The feedstock has no `.ci_support` files and "

@@ -1995,7 +1970,3 @@ def test_lint_wheels(tmp_path, yaml_block, annotation):
         assert any(expected_message in lint for lint in lints)
     else:
         assert any(expected_message in hint for hint in hints)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/test_variant_algebra.py b/tests/test_variant_algebra.py
index 45f43336c..38a9178e4 100644
--- a/tests/test_variant_algebra.py
+++ b/tests/test_variant_algebra.py
@@ -65,7 +65,7 @@ def test_add():
         variant_add(tv4, tv1)


-def test_ordering():
+def test_ordering(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -90,12 +90,12 @@ def test_ordering():
     )

     res = variant_add(start, mig_compiler)
-    assert res["c_compiler"] == ["gcc"]
-    print(res)
+    assert res == snapshot
+
     # raise Exception()


-def test_no_ordering():
+def test_no_ordering(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -120,12 +120,11 @@ def test_no_ordering():
     )

     res = variant_add(start, mig_compiler)
-    assert res["xyz"] == ["2"]
-    print(res)
+    assert res == snapshot

     # raise Exception()


-def test_ordering_downgrade():
+def test_ordering_downgrade(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -150,11 +149,10 @@ def test_ordering_downgrade():
     )

     res = variant_add(start, mig_compiler)
-    assert res["jpeg"] == ["2.0"]
-    print(res)
+    assert res == snapshot


-def test_ordering_space():
+def test_ordering_space(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -174,11 +172,10 @@ def test_ordering_space():
     )

     res = variant_add(start, mig_compiler)
-    assert res["python"] == ["2.7 *_cpython"]
-    print(res)
+    assert res == snapshot


-def test_new_pinned_package():
+def test_new_pinned_package(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -204,12 +201,10 @@ def test_new_pinned_package():
     )

     res = variant_add(start, mig_compiler)
-    assert res["gprc-cpp"] == ["1.23"]
-    assert res["pin_run_as_build"]["gprc-cpp"]["max_pin"] == "x.x"
-    print(res)
+    assert res == snapshot


-def test_zip_keys():
+def test_zip_keys(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -240,13 +235,10 @@ def test_zip_keys():
     )

     res = variant_add(start, mig_compiler)
-    print(res)
-
-    assert len(res["zip_keys"]) == 3
-    assert ["python", "vc", "vc_runtime"] in res["zip_keys"]
+    assert res == snapshot


-def test_migrate_windows_compilers():
+def test_migrate_windows_compilers(snapshot):
     start = parse_variant(
         dedent(
             """
@@ -277,14 +269,10 @@ def test_migrate_windows_compilers():
     )

     res = variant_add(start, mig)
-    print(res)
-
-    assert len(res["c_compiler"]) == 2
-    assert res["c_compiler"] == ["vs2008", "vs2017"]
-    assert len(res["zip_keys"][0]) == 2
+    assert res == snapshot


-def test_pin_run_as_build():
+def test_pin_run_as_build(snapshot):
     start = parse_variant(
         dedent(
             """\
@@ -310,12 +298,10 @@ def test_pin_run_as_build():
     )

     res = variant_add(start, mig_compiler)
-    print(res)
-
-    assert len(res["pin_run_as_build"]) == 3
+    assert res == snapshot


-def test_py39_migration():
+def test_py39_migration(snapshot):
     """Test that running the python 3.9 keyadd migrator has the desired effect."""
     base = parse_variant(
         dedent(
@@ -394,33 +380,14 @@ def test_py39_migration():
     res = variant_add(base, migration_pypy)
     res2 = variant_add(res, migration_py39)
+    res3 = variant_add(base, migration_py39)
     print(res)
-    print(res2)
-
-    assert res2["python"] == migration_py39["__migrator"]["ordering"]["python"]
-    # assert that we've ordered the numpy bits properly
-    assert res2["numpy"] == [
-        "1.16",
-        "1.100",
-        "1.16",
-        "1.16",
-        "1.18",
-    ]
+    assert res2 == snapshot(name="res2")
+    assert res3 == snapshot(name="res3")

-    res3 = variant_add(base, migration_py39)
-    print(res3)
-    assert res3["python"] == [
-        "3.6.* *_cpython",
-        "3.9.* *_cpython",  # newly added
-        "3.7.* *_cpython",
-        "3.8.* *_cpython",
-    ]
-    # The base doesn't have an entry for numpy
-    assert "numpy" not in res3
-
-
-def test_multiple_key_add_migration():
+
+def test_multiple_key_add_migration(snapshot):
     """Test that running the python 3.9 keyadd migrator has the desired effect."""
     base = parse_variant(
         dedent(
@@ -503,35 +470,14 @@
     res = variant_add(base, migration_pypy)
     res2 = variant_add(res, migration_py39)
+    res3 = variant_add(base, migration_py39)
     print(res)
-    print(res2)
-
-    assert res2["python"] == migration_py39["__migrator"]["ordering"]["python"]
-    # assert that we've ordered the numpy bits properly
-    assert res2["numpy"] == [
-        "1.16",
-        "1.100",
-        "1.200",
-        "1.16",
-        "1.16",
-        "1.18",
-    ]
+    assert res2 == snapshot(name="res2")
+    assert res3 == snapshot(name="res3")

-    res3 = variant_add(base, migration_py39)
-    print(res3)
-    assert res3["python"] == [
-        "3.6.* *_cpython",
-        "3.9.* *_cpython",  # newly added
-        "3.10.* *_cpython",
-        "3.7.* *_cpython",
-        "3.8.* *_cpython",
-    ]
-    # The base doesn't have an entry for numpy
-    assert "numpy" not in res3
-
-
-def test_variant_key_remove():
+
+def test_variant_key_remove(snapshot):
     base = parse_variant(
         dedent(
             """
@@ -575,10 +521,7 @@ def test_variant_key_remove():
     )

     res = variant_add(base, removal)
-
-    assert res["python"] == ["3.6.* *_73_pypy", "3.8.* *_cpython"]
-    assert res["numpy"] == ["1.18", "1.16"]
-    assert res["python_impl"] == ["pypy", "cpython"]
+    assert res == snapshot


 @pytest.mark.parametrize(