---
name: "Various daily checks"
on:
  schedule:
    # Daily at 04:34 UTC; an off-the-hour minute avoids GitHub's congested cron slots.
    - cron: '34 4 * * *'
permissions: # least privileges, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#permissions
  contents: read
env:
  # Quoted so YAML keeps it a string, not the integer 12.
  CLANG_VERSION: '12'
jobs:
  # Warn when CentOS 7 SCL publishes a devtoolset newer than the one the RPM
  # builds are pinned to (devtoolset-12 is probed below; bump the probe when
  # the pin moves).
  el7-devtoolset:
    if: ${{ vars.SCHEDULED_MISC_DAILIES }}
    runs-on: ubuntu-22.04
    steps:
      - name: Check whether a newer devtoolset exists
        run: |
          if docker run --rm centos:7 bash -c 'yum install -y centos-release-scl-rh && yum info devtoolset-12-gcc-c++'
          then
            echo "::warning file=builder-support/dockerfiles/Dockerfile.rpmbuild::A newer devtoolset exists. Please edit builder-support/dockerfiles/Dockerfile.rpmbuild and .github/workflows/dailies.yml"
            exit 1
          else
            echo "::notice ::No newer devtoolset exists (good)"
            exit 0
          fi
check-debian-autoremovals:
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 5
submodules: recursive
- name: Check if Debian is about to toss us off a balcony
run: ./build-scripts/check-debian-autoremovals.py
coverity-auth:
name: coverity scan of the auth
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
with:
product: 'authoritative'
secrets:
COVERITY_TOKEN: ${{ secrets.coverity_auth_token }}
COVERITY_EMAIL: ${{ secrets.coverity_email }}
coverity-dnsdist:
name: coverity scan of dnsdist
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
with:
product: 'dnsdist'
secrets:
COVERITY_TOKEN: ${{ secrets.coverity_dnsdist_token }}
COVERITY_EMAIL: ${{ secrets.coverity_email }}
coverity-rec:
name: coverity scan of the rec
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
uses: PowerDNS/pdns/.github/workflows/coverity.yml@master
with:
product: 'recursor'
secrets:
COVERITY_TOKEN: ${{ secrets.coverity_rec_token }}
COVERITY_EMAIL: ${{ secrets.coverity_email }}
# The jobs below check that only the pinned version of Python3 packages are installed with pip. Running in a container, a
# pip proxy registers all requests for installing packages with pip. Then, the downloaded packages and their versions are compared
# with the list used for the installation (i.e. docs/requirements.txt, pdns/recursordist/docs/requirements.txt, etc). If a package
# is missing or a version does not match the one expected, this job fails, which makes the workflow fail.
#
# The pinned version plus hashes are generated using pip-compile using an input file that includes the original list of packages
# (pip-compile --generate-hashes -U requirements.in). "pip-compile" can be installed via pip-tools with Python 3.11, which is the version
# used in the CI. Any other Python version would end up with different versions for packages and could result in workflow failures.
#
# One recurring error thrown by this validation is when a new version of a pinned package is released for a "setup-requires" dependency
# of one of the packages in the list (see https://github.com/PowerDNS/pdns/pull/14596). The package version in "requirements.in" should
# be modified to solve this issue. In some cases, it is enough to generate again the list of packages, making sure to add the -U flag
# to force the upgrade: "pip-compile --generate-hashes -U requirements.in" (this could include upgrading other packages).
list-pip-requirement-files:
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
runs-on: ubuntu-22.04
outputs:
req-files: ${{ steps.get-list-requirements.outputs.files }}
steps:
- uses: actions/checkout@v4
- name: Get all requirements.txt files and export them as outputs
id: get-list-requirements
run: |
echo "files=$(find . -name 'requirements.txt' | jq -R -s -c 'split("\n")[:-1]')" >> "$GITHUB_OUTPUT"
validate-pip-hashes:
if: ${{ vars.SCHEDULED_MISC_DAILIES }}
name: ${{ matrix.requirements-file }} - Validate list of packages and hashes
runs-on: ubuntu-22.04
needs: list-pip-requirement-files
env:
SERVICE_IP_ADDR: 127.0.0.1
services:
database:
image: epicwink/proxpi
ports:
- 5000:5000
options: >-
--restart always
strategy:
fail-fast: false
matrix:
requirements-file: ${{ fromJson(needs.list-pip-requirement-files.outputs.req-files) }}
steps:
- run: echo "${{ matrix.requirements-file }}"
- uses: PowerDNS/pdns/set-ubuntu-mirror@meta
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.11'
# Configure pip index-url set to proxpi
- run: pip config set global.index-url http://${{ env.SERVICE_IP_ADDR }}:5000/index/
- run: pip config set global.trusted-host ${{ env.SERVICE_IP_ADDR }}
- id: proxpi-docker
run: echo "id=$(docker ps | grep "epicwink/proxpi" | awk '{print $1}')" >> "$GITHUB_OUTPUT"
- run: pip install -r ${{ matrix.requirements-file }}
- name: Get the list of packages requested to the pip proxy
run: |
docker logs ${{ steps.proxpi-docker.outputs.id }} 2>&1 | grep whl | awk '{print $8}' | cut -d "/" -f 4 | awk -F'-' '{print $1"=="$2}' | sort -u --ignore-case | sed 's/_/-/' | egrep -v "pip==|setuptools==|wheel==|setuptools-git==" > /tmp/proxpi.log
cat /tmp/proxpi.log
- name: check only listed packages were installed
run: for i in `cat /tmp/proxpi.log`; do grep -qq -i $i ${{ matrix.requirements-file }} || ( echo "$i not found" && exit 1 ); done