diff --git a/Jenkinsfile b/Jenkinsfile
index 1418e369a..7b595392e 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,5 +1,20 @@
-@Library(value='kids-first/aws-infra-jenkins-shared-libraries', changelog=false) _
-ecs_service_type_1 {
+@Library(value="kids-first/aws-infra-jenkins-shared-libraries", changelog=false) _
+ecs_service_type_1_standard {
     projectName = "kf-api-study-creator"
-    agentLabel = "terraform-testing"
+    environments = "dev,qa,prd"
+    docker_image_type = "alpine"
+    entrypoint_command = "/app/bin/entrypoint.sh"
+    quick_deploy = "true"
+    deploy_scripts_version = "master"
+    internal_app = "false"
+    container_port = "80"
+    vcpu_container = "2048"
+    memory_container = "4096"
+    vcpu_task = "2048"
+    memory_task = "4096"
+    health_check_path = "/health_check"
+    dependencies = "ecr"
+    friendly_dns_name = "study-creator"
+    override_templates = "true"
+    additional_ssl_cert_domain_name = "*.kidsfirstdrc.org"
 }
diff --git a/JenkinsfileWorker b/JenkinsfileWorker
new file mode 100644
index 000000000..70f8b94d7
--- /dev/null
+++ b/JenkinsfileWorker
@@ -0,0 +1,19 @@
+@Library(value='kids-first/aws-infra-jenkins-shared-libraries', changelog=false) _
+ecs_service_existing_alb {
+    projectName = "kf-api-study-creator-worker"
+    alb_name = "kf-api-study-creator"
+    host_based_routing = "0"
+    attach_to_alb = "false"
+    orgFullName = "kids-first"
+    environments = "dev,qa,prd"
+    build_environments = "dev,qa,prd"
+    docker_image_type = "alpine"
+    create_default_iam_role = "1"
+    entrypoint_command = "/app/bin/entrypoint.sh"
+    quick_deploy = "true"
+    container_port = "80"
+    health_check_path = "/"
+    internal_app = "false"
+    dependencies = "ecr"
+    module_version = "v1.2"
+}
diff --git a/bin/entrypoint.sh b/bin/entrypoint.sh
index b710a3c41..89174af71 100755
--- a/bin/entrypoint.sh
+++ b/bin/entrypoint.sh
@@ -1,59 +1,21 @@
-#!/bin/ash
-# Try to load any database secrets, these will override the above
-if [ -n $DATABASE_SECRETS ]; then
-    aws s3 cp $DATABASE_SECRETS ./database.env
-    source ./database.env
-    export $(cut -d= -f1 ./database.env)
-    rm ./database.env
-fi
-
-# Try to load auth0 secrets from S3
-if [ -n $AUTH0_SECRETS ]; then
-    aws s3 cp $AUTH0_SECRETS ./auth0.env
-    source ./auth0.env
-    export $(cut -d= -f1 ./auth0.env)
-    rm ./auth0.env
-fi
-
-# Try to load general settings from S3
-if [ -n $SETTINGS ]; then
-    aws s3 cp $SETTINGS ./settings.env
-    source ./settings.env
-    export $(cut -d= -f1 ./settings.env)
-    rm ./settings.env
-fi
-
-# This will export our secrets from S3 into our environment
-if [ -n $CAVATICA_SECRETS ]; then
-    aws s3 cp $CAVATICA_SECRETS ./cavatica.env
-    source ./cavatica.env
-    export $(cut -d= -f1 ./cavatica.env)
-    rm ./cavatica.env
-fi
-
-if [[ -n $CAVATICA_VOLUMES ]]; then
-    echo "Loading Cavatica volume credentials from S3"
-    aws s3 cp $CAVATICA_VOLUMES ./cavatica_volumes.env
-    source ./cavatica_volumes.env
-    export $(cut -d= -f1 ./cavatica_volumes.env)
-    rm ./cavatica_volumes.env
-fi
-
-if [[ -n $EMAIL ]]; then
-    echo "Loading email credentials from S3"
-    aws s3 cp $EMAIL ./email.env
-    source ./email.env
-    export $(cut -d= -f1 ./email.env)
-    rm ./email.env
-fi
-
-if $WORKER ; then
+set -e
+set +x
+if [[ "$WORKER" == "true" ]]; then
+    echo "Starting worker"
     supervisord -c /etc/supervisor/conf.d/worker.conf
-elif [[ $1 = scheduler ]]; then
+elif [[ "$1" == "scheduler" ]]; then
+    echo "Starting scheduler"
     /app/manage.py schedule_jobs
     supervisord -c /etc/supervisor/conf.d/scheduler.conf
 else
+    echo "Starting service"
+    echo "Sync Studies"
+    echo "Dataservice: $DATASERVICE_URL"
+    python manage.py syncstudies --api $DATASERVICE_URL
+    echo "Migrate"
     /app/manage.py migrate
+    echo "Setup Permissions"
     /app/manage.py setup_permissions
+    echo "Execute Gunicorn"
     exec gunicorn creator.wsgi:application -b 0.0.0.0:80 --access-logfile - --error-logfile - --workers 4
 fi