-
Notifications
You must be signed in to change notification settings - Fork 17
65 lines (56 loc) · 2.55 KB
/
airflow-am.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
---
# This is a basic workflow to help you get started with Actions
name: Airflow/DAG [A-M]

# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the master branch
on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  airflow-am:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      # Airflow 1.10.x constraints below are pinned to Python 3.7.
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: '3.7'

      # Java is required by papermill/notebook tooling used in the DAG runs.
      # NOTE(review): presumably needed by a notebook kernel — confirm against requirements.txt
      - uses: actions/setup-java@v1
        with:
          java-version: '9.0.4'

      # Install with the official Airflow constraint file so transitive pins
      # match the Airflow 1.10.14 release.
      - name: Run pip install
        run: |
          pip install -r requirements.txt --constraint https://raw.githubusercontent.com/apache/airflow/constraints-1.10.14/constraints-3.7.txt --use-deprecated legacy-resolver

      # Initialize the Airflow metadata DB and seed the Variables/Connections
      # the DAGs read at parse/run time. Secrets come from repo settings.
      - name: Configure Airflow
        run: |
          export AIRFLOW_HOME=${{ GITHUB.workspace }}
          airflow upgradedb
          airflow variables -s ENVIRONMENT CI
          airflow variables -s S3_BUCKET test-covid19
          airflow variables -s AWS_ACCESS_KEY_ID ${{ secrets.AWS_ACCESS_KEY_ID }}
          airflow variables -s AWS_SECRET_ACCESS_KEY ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          airflow variables -s SNOWFLAKE_CONNECTION SNOWFLAKE_DEV
          airflow variables -s SNOWFLAKE_STAGE COVID_DEV
          airflow variables -s GIT_USER ${{ secrets.GIT_USER }}
          airflow variables -s GIT_TOKEN ${{ secrets.GIT_TOKEN }}
          airflow connections -a --conn_id SNOWFLAKE_DEV --conn_type snowflake --conn_host starschema.snowflakecomputing.com --conn_login etl --conn_password '${{ secrets.SNOWFLAKE_ETL_PASSWORD }}' --conn_schema PUBLIC --conn_extra '{ "account": "starschema", "warehouse": "LOAD_WH", "database": "COVID19_DEV", "region": "us-west-2", "role": "COVID_ADMIN" }'
          airflow connections -l
          airflow list_dags

      # Execute each notebook-backed DAG task-by-task with `airflow test`,
      # skipping files listed in known-issues.lst.
      # NOTE(review): glob is [A-I]* but the workflow name says [A-M] — confirm
      # whether J-M notebooks are intentionally covered elsewhere.
      - name: Run DAG tests
        run: |
          export AIRFLOW_HOME=${{ GITHUB.workspace }}
          export ENVIRONMENT=CI
          export GSHEET_API_CREDENTIALS='${{ secrets.GSHEET_API_CREDENTIALS }}'
          readarray -t skip_files < known-issues.lst
          for i in $AIRFLOW_HOME/notebooks/[A-I]* ; do
          load=`basename $i`;
          if [[ " ${skip_files[@]} " =~ " ${load} " ]]; then
          continue
          fi
          load=$(echo "$load" | sed 's/\.[^.]*$//');
          airflow test etl_$load execute_notebook 2020-01-01;
          airflow test etl_$load upload_to_s3 2020-01-01;
          airflow test etl_$load upload_to_snowflake 2020-01-01;
          done
          airflow test github_poll_trigger check_commits_jhu_covid-19 2020-03-27