forked from usgs/shakemap
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path.gitlab-ci.yml
111 lines (98 loc) · 2.86 KB
/
.gitlab-ci.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
# ShakeMap must be built with Conda: pull the USGS Python build image and
# install Miniforge in before_script so every job has a conda toolchain.
default:
  image: code.usgs.gov:5001/devops/images/usgs/python:3.12-build
  tags:
    - build
  before_script:
    - python --version
    - export PATH="/home/usgs-user/.local/bin:$PATH"
    # Fetch the latest Miniforge installer and install it non-interactively
    # (-f overwrite, -b batch mode) into the runner's home directory.
    - mini_conda_url=https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh
    - curl -L $mini_conda_url -o miniconda.sh
    - bash miniconda.sh -f -b -p $HOME/miniconda
    # Source conda's shell hooks so "conda activate" works in job scripts.
    - . $HOME/miniconda/etc/profile.d/conda.sh
    - conda install -c conda-forge -y tree make
# Repository rules: reusable rule anchors that pick the deploy ENVIRONMENT
# based on which project/branch the pipeline is running against.

# Forks (any project other than the canonical repo) on non-default branches.
.fork: &fork
  if: >
    $CI_PROJECT_PATH != 'ghsc/esi/shakemap'
    && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
  variables:
    ENVIRONMENT: development

# Temporary rule for the upstream development branch.
.dev: &dev
  if: >
    $CI_PROJECT_PATH == 'ghsc/esi/shakemap'
    && $CI_COMMIT_BRANCH == 'v4.2_rc'
  variables:
    ENVIRONMENT: development

# Default branch of the canonical repo deploys to staging.
.upstream: &upstream
  if: >
    $CI_PROJECT_PATH == 'ghsc/esi/shakemap'
    && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
  variables:
    ENVIRONMENT: staging
# Pipeline stages. For now only a testing stage (PyTest) and a docs stage
# have jobs; "build" is declared for future use.
stages:
  - test
  - build
  - docs
# Shared parallel-matrix configuration: one pipeline per Python version.
# Version strings are quoted so YAML does not parse them as floats.
.versions:
  parallel:
    matrix:
      - PYTHON_VERSION: ["3.12"]
# Reusable build steps (pulled into jobs via !reference): create and
# activate the shakemap conda environment, then log its package list.
.build_config:
  script:
    - conda init bash
    - bash install.sh -d
    - conda activate shakemap
    - echo "***********************************************"
    - conda list
    - echo "***********************************************"
    # - strec_cfg update --datafolder $HOME/.strec/data --slab --gcmt
# Test job: build the conda environment, run the test suite with coverage,
# and publish coverage + JUnit reports to GitLab.
run tests and coverage:
  stage: test
  script:
    # Shared environment-build steps
    - !reference [.build_config, script]
    # Stop at the first failure; emit Cobertura XML and JUnit XML reports
    - pytest -x --cov=shakemap --cov-report xml:coverage.xml --cov-report term --junitxml junit.xml
    # - coverage xml
  # Extract the total coverage percentage from the pytest-cov summary line
  coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
  artifacts:
    reports:
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
      junit: junit.xml
  rules:
    - *fork
    - *dev
    - *upstream
  # Fan out across the shared Python-version matrix
  parallel: !reference [.versions, parallel]
# GitLab Pages job: build the Sphinx documentation and publish the
# generated site from the "public" directory.
pages:
  stage: docs
  variables:
    PYTHON_VERSION: "3.12"
  artifacts:
    paths:
      # Folder whose contents are exposed at the Pages URL
      - public
  rules:
    # Only merges to the default branch of the canonical repo trigger
    # a Pages deploy.
    - <<: *upstream
      when: on_success
  script:
    # Shared environment-build steps
    - !reference [.build_config, script]
    - cd doc
    - pip install -r requirements.txt
    - cd ..
    # Sphinx build needs a ShakeMap profile in place
    - mkdir $HOME/.shakemap
    - mv doc/skeleton_profile $HOME/.shakemap/profiles.conf
    - sphinx-build doc/ public