reuse built frontend in ci, merge compose files #6674

Merged · 4 commits · Apr 10, 2024
Changes from 1 commit
6 changes: 3 additions & 3 deletions Makefile
@@ -1,4 +1,4 @@
-.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
+.PHONY: compose_build up test_db create_db clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
 
 export COMPOSE_DOCKER_CLI_BUILD=1
 export DOCKER_BUILDKIT=1
@@ -16,8 +16,8 @@ test_db:
 done
 docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
 
-create_database: .env
-docker compose run server migrate
+create_db: .env
+docker compose run server create_db
 
 clean:
 docker compose down
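Side note on usage: the renamed target is a thin wrapper around a single Docker Compose command, so CI or local scripts can run the same step directly. A minimal Python sketch of that invocation (assumes Docker Compose v2 on PATH and the .env file the real target depends on):

import subprocess

def create_db() -> None:
    # Same command the new Makefile recipe runs: the server image's `create_db`
    # entrypoint command creates the tables and applies migrations.
    subprocess.run(["docker", "compose", "run", "server", "create_db"], check=True)

if __name__ == "__main__":
    create_db()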
42 changes: 21 additions & 21 deletions bin/docker-entrypoint
@@ -1,12 +1,12 @@
 #!/bin/bash
 set -e
 
-if [ -z $REDASH_CACHE_URL ]; then
-export REDASH_CACHE_URL=redis://:${REDASH_CACHE_PASSWORD}@${REDASH_CACHE_HOSTNAME}:${REDASH_CACHE_PORT}/${REDASH_CACHE_NAME}
+if [ -z $REDASH_REDIS_URL ]; then
+export REDASH_REDIS_URL=redis://:${REDASH_REDIS_PASSWORD}@${REDASH_REDIS_HOSTNAME}:${REDASH_REDIS_PORT}/${REDASH_REDIS_NAME}
 fi
 
-if [ -z $REDASH_DB_URL ]; then
-export REDASH_DB_URL=postgresql://${REDASH_DB_USER}:${REDASH_DB_PASSWORD}@${REDASH_DB_HOSTNAME}:${REDASH_DB_PORT}/${REDASH_DB_NAME}
+if [ -z $REDASH_DATABASE_URL ]; then
+export REDASH_DATABASE_URL=postgresql://${REDASH_DATABASE_USER}:${REDASH_DATABASE_PASSWORD}@${REDASH_DATABASE_HOSTNAME}:${REDASH_DATABASE_PORT}/${REDASH_DATABASE_NAME}
 fi
 
 scheduler() {
@@ -48,7 +48,7 @@ worker() {
 workers_healthcheck() {
 WORKERS_COUNT=${WORKERS_COUNT}
 echo "Checking active workers count against $WORKERS_COUNT..."
-ACTIVE_WORKERS_COUNT=`echo $(rq info --url $REDASH_CACHE_URL -R | grep workers | grep -oP ^[0-9]+)`
+ACTIVE_WORKERS_COUNT=`echo $(rq info --url $REDASH_REDIS_URL -R | grep workers | grep -oP ^[0-9]+)`
 if [ "$ACTIVE_WORKERS_COUNT" -lt "$WORKERS_COUNT" ]; then
 echo "$ACTIVE_WORKERS_COUNT workers are active, Exiting"
 exit 1
@@ -82,17 +82,17 @@ server() {
 esac
 }
 
-migrate() {
-REDASH_DB_MIGRATE_TIMEOUT=${REDASH_DB_UPGRADE_TIMEOUT:-600}
-REDASH_DB_MIGRATE_MAX_ATTEMPTS=${REDASH_DB_MIGRATE_MAX_ATTEMPTS:-5}
-REDASH_DB_MIGRATE_RETRY_WAIT=${REDASH_DB_MIGRATE_RETRY_WAIT:-10}
+create_db() {
+REDASH_DATABASE_MIGRATE_TIMEOUT=${REDASH_DATABASE_UPGRADE_TIMEOUT:-600}
+REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS=${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS:-5}
+REDASH_DATABASE_MIGRATE_RETRY_WAIT=${REDASH_DATABASE_MIGRATE_RETRY_WAIT:-10}
 ATTEMPTS=0
-while ((ATTEMPTS < REDASH_DB_MIGRATE_MAX_ATTEMPTS)); do
-echo "Starting attempt ${ATTEMPTS} of ${REDASH_DB_MIGRATE_MAX_ATTEMPTS}"
+while ((ATTEMPTS < REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS)); do
+echo "Starting attempt ${ATTEMPTS} of ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS}"
 ATTEMPTS=$((ATTEMPTS+1))
-timeout $REDASH_DB_MIGRATE_TIMEOUT /app/manage.py database create_tables
-timeout $REDASH_DB_MIGRATE_TIMEOUT /app/manage.py db upgrade
-STATUS=$(timeout $REDASH_DB_MIGRATE_TIMEOUT /app/manage.py status 2>&1)
+timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py database create_tables
+timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py db upgrade
+STATUS=$(timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py status 2>&1)
 RETCODE=$?
 echo "Return code: ${RETCODE}"
 echo "Status: ${STATUS}"
@@ -101,7 +101,7 @@ migrate() {
 exit 0
 ;;
 124)
-echo "Status command timed out after ${REDASH_DB_MIGRATE_TIMEOUT} seconds."
+echo "Status command timed out after ${REDASH_DATABASE_MIGRATE_TIMEOUT} seconds."
 ;;
 esac
 case "$STATUS" in
@@ -112,10 +112,10 @@ migrate() {
 echo "Database does not appear to be installed."
 ;;
 esac
-echo "Waiting ${REDASH_DB_MIGRATE_RETRY_WAIT} seconds before retrying."
-sleep ${REDASH_DB_MIGRATE_RETRY_WAIT}
+echo "Waiting ${REDASH_DATABASE_MIGRATE_RETRY_WAIT} seconds before retrying."
+sleep ${REDASH_DATABASE_MIGRATE_RETRY_WAIT}
 done
-echo "Reached ${REDASH_DB_MIGRATE_MAX_ATTEMPTS} attempts, giving up."
+echo "Reached ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS} attempts, giving up."
 exit 1
 }
 
@@ -131,7 +131,7 @@ help() {
 echo ""
 echo "shell -- open shell"
 echo "debug -- start Flask development server with remote debugger via ptvsd"
-echo "migrate -- create database tables and run migrations"
+echo "create_db -- create database tables and run migrations"
 echo "manage -- CLI to manage redash"
 echo "tests -- run tests"
 }
@@ -174,8 +174,8 @@ case "$1" in
 shell)
 exec /app/manage.py shell
 ;;
-migrate)
-migrate
+create_db)
+create_db
 ;;
 manage)
 shift
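Only the variable names change in this file; the entrypoint still assembles each connection URL from its component variables when the consolidated one is unset. A rough Python equivalent of that fallback logic, for readers skimming the bash (the component defaults below are placeholders to keep the sketch runnable, not values the script defines):

import os

def redash_redis_url() -> str:
    # Mirrors the bash fallback: use REDASH_REDIS_URL if set, otherwise build it.
    url = os.environ.get("REDASH_REDIS_URL")
    if url:
        return url
    return "redis://:{pw}@{host}:{port}/{db}".format(
        pw=os.environ.get("REDASH_REDIS_PASSWORD", ""),
        host=os.environ.get("REDASH_REDIS_HOSTNAME", "redis"),
        port=os.environ.get("REDASH_REDIS_PORT", "6379"),
        db=os.environ.get("REDASH_REDIS_NAME", "0"),
    )

def redash_database_url() -> str:
    # Same pattern for the Postgres URL.
    url = os.environ.get("REDASH_DATABASE_URL")
    if url:
        return url
    return "postgresql://{user}:{pw}@{host}:{port}/{db}".format(
        user=os.environ.get("REDASH_DATABASE_USER", "postgres"),
        pw=os.environ.get("REDASH_DATABASE_PASSWORD", ""),
        host=os.environ.get("REDASH_DATABASE_HOSTNAME", "postgres"),
        port=os.environ.get("REDASH_DATABASE_PORT", "5432"),
        db=os.environ.get("REDASH_DATABASE_NAME", "postgres"),
    )

if __name__ == "__main__":
    print(redash_redis_url())
    print(redash_database_url())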
2 changes: 1 addition & 1 deletion client/cypress/cypress.js
@@ -49,7 +49,7 @@ function buildServer() {
 function startServer() {
 console.log("Starting the server...");
 execSync("docker compose up -d", { stdio: "inherit" });
-execSync("docker compose run server migrate", { stdio: "inherit" });
+execSync("docker compose run server create_db", { stdio: "inherit" });
 }
 
 function stopServer() {
5 changes: 2 additions & 3 deletions compose.base.yaml
@@ -1,4 +1,3 @@
-version: "3.8"
 services:
 .redash:
 build:
@@ -12,8 +11,8 @@ services:
 command: manage version
 environment:
 REDASH_LOG_LEVEL: "INFO"
-REDASH_CACHE_URL: "redis://redis:6379/0"
-REDASH_DB_URL: "postgresql://postgres@postgres/postgres"
+REDASH_REDIS_URL: "redis://redis:6379/0"
+REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
 REDASH_RATELIMIT_ENABLED: "false"
 REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
 REDASH_MAIL_SERVER: "email"
4 changes: 2 additions & 2 deletions redash/__init__.py
@@ -43,8 +43,8 @@ def setup_logging():
 
 setup_logging()
 
-redis_connection = redis.from_url(settings.CACHE_URL)
-rq_redis_connection = redis.from_url(settings.RQ_CACHE_URL)
+redis_connection = redis.from_url(settings.REDIS_URL)
+rq_redis_connection = redis.from_url(settings.RQ_REDIS_URL)
 mail = Mail()
 migrate = Migrate(compare_type=True)
 statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
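For context, both module-level connections come from redis-py's redis.from_url(); the practical difference between them is that the first URL has decode_responses enabled (Redash appends it to the URL via add_decode_responses_to_redis_url, visible in the settings hunk below). A tiny illustrative sketch using a hard-coded URL rather than the real settings values, and passing the flag as a keyword for simplicity:

import redis

# Illustrative URL; in Redash these come from settings.REDIS_URL and settings.RQ_REDIS_URL.
redis_connection = redis.from_url("redis://localhost:6379/0", decode_responses=True)
rq_redis_connection = redis.from_url("redis://localhost:6379/0")

redis_connection.set("healthcheck", "ok")
print(redis_connection.get("healthcheck"))  # "ok" as str, because of decode_responses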
12 changes: 6 additions & 6 deletions redash/settings/__init__.py
@@ -16,10 +16,10 @@
 )
 from redash.settings.organization import DATE_FORMAT, TIME_FORMAT # noqa
 
-# _CACHE_URL is the unchanged CACHE_URL we get from env vars, to be used later with RQ
-_CACHE_URL = os.environ.get("REDASH_CACHE_URL", os.environ.get("CACHE_URL", "redis://localhost:6379/0"))
+# _REDIS_URL is the unchanged REDIS_URL we get from env vars, to be used later with RQ
+_REDIS_URL = os.environ.get("REDASH_REDIS_URL", os.environ.get("REDIS_URL", "redis://localhost:6379/0"))
 # This is the one to use for Redash' own connection:
-CACHE_URL = add_decode_responses_to_redis_url(_CACHE_URL)
+REDIS_URL = add_decode_responses_to_redis_url(_REDIS_URL)
 PROXIES_COUNT = int(os.environ.get("REDASH_PROXIES_COUNT", "1"))
 
 STATSD_HOST = os.environ.get("REDASH_STATSD_HOST", "127.0.0.1")
@@ -28,15 +28,15 @@
 STATSD_USE_TAGS = parse_boolean(os.environ.get("REDASH_STATSD_USE_TAGS", "false"))
 
 # Connection settings for Redash's own database (where we store the queries, results, etc)
-SQLALCHEMY_DATABASE_URI = os.environ.get("REDASH_DB_URL", os.environ.get("DB_URL", "postgresql:///postgres"))
+SQLALCHEMY_DATABASE_URI = os.environ.get("REDASH_DATABASE_URL", os.environ.get("DATABASE_URL", "postgresql:///postgres"))
 SQLALCHEMY_MAX_OVERFLOW = int_or_none(os.environ.get("SQLALCHEMY_MAX_OVERFLOW"))
 SQLALCHEMY_POOL_SIZE = int_or_none(os.environ.get("SQLALCHEMY_POOL_SIZE"))
 SQLALCHEMY_DISABLE_POOL = parse_boolean(os.environ.get("SQLALCHEMY_DISABLE_POOL", "false"))
 SQLALCHEMY_ENABLE_POOL_PRE_PING = parse_boolean(os.environ.get("SQLALCHEMY_ENABLE_POOL_PRE_PING", "false"))
 SQLALCHEMY_TRACK_MODIFICATIONS = False
 SQLALCHEMY_ECHO = False
 
-RQ_CACHE_URL = os.environ.get("RQ_CACHE_URL", _CACHE_URL)
+RQ_REDIS_URL = os.environ.get("RQ_REDIS_URL", _REDIS_URL)
 
 # The following enables periodic job (every 5 minutes) of removing unused query results.
 QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
@@ -258,7 +258,7 @@ def email_server_is_configured():
 
 RATELIMIT_ENABLED = parse_boolean(os.environ.get("REDASH_RATELIMIT_ENABLED", "true"))
 THROTTLE_LOGIN_PATTERN = os.environ.get("REDASH_THROTTLE_LOGIN_PATTERN", "50/hour")
-LIMITER_STORAGE = os.environ.get("REDASH_LIMITER_STORAGE", CACHE_URL)
+LIMITER_STORAGE = os.environ.get("REDASH_LIMITER_STORAGE", REDIS_URL)
 THROTTLE_PASS_RESET_PATTERN = os.environ.get("REDASH_THROTTLE_PASS_RESET_PATTERN", "10/hour")
 
 # CORS settings for the Query Result API (and possibly future external APIs).
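To summarize the settings change: each URL is resolved from a REDASH_-prefixed variable first, then an unprefixed one, then a hard-coded default, and RQ can be pointed at a separate Redis. A standalone sketch of that precedence, with defaults copied from the hunks above:

import os

# Prefer REDASH_REDIS_URL, then REDIS_URL, then a local default.
_REDIS_URL = os.environ.get("REDASH_REDIS_URL", os.environ.get("REDIS_URL", "redis://localhost:6379/0"))
# Same pattern for the metadata database.
SQLALCHEMY_DATABASE_URI = os.environ.get("REDASH_DATABASE_URL", os.environ.get("DATABASE_URL", "postgresql:///postgres"))
# RQ reuses the Redis URL unless RQ_REDIS_URL overrides it.
RQ_REDIS_URL = os.environ.get("RQ_REDIS_URL", _REDIS_URL)

if __name__ == "__main__":
    print(_REDIS_URL)
    print(SQLALCHEMY_DATABASE_URI)
    print(RQ_REDIS_URL)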
4 changes: 2 additions & 2 deletions tests/__init__.py
@@ -4,9 +4,9 @@
 from contextlib import contextmanager
 from unittest import TestCase
 
-os.environ["REDASH_CACHE_URL"] = os.environ.get("REDASH_CACHE_URL", "redis://localhost:6379/0").replace("/0", "/5")
+os.environ["REDASH_REDIS_URL"] = os.environ.get("REDASH_REDIS_URL", "redis://localhost:6379/0").replace("/0", "/5")
 # Use different url for RQ to avoid DB being cleaned up:
-os.environ["RQ_CACHE_URL"] = os.environ.get("REDASH_CACHE_URL", "redis://localhost:6379/0").replace("/5", "/6")
+os.environ["RQ_REDIS_URL"] = os.environ.get("REDASH_REDIS_URL", "redis://localhost:6379/0").replace("/5", "/6")
 
 # Dummy values for oauth login
 os.environ["REDASH_GOOGLE_CLIENT_ID"] = "dummy"
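The test bootstrap keeps the same trick under the new names: rewrite the database index in the URL so tests hit Redis DB 5 while RQ uses DB 6 and is not wiped by test cleanup. The net effect, assuming the default localhost URL and no REDASH_REDIS_URL in the environment:

import os

# What the rewritten lines produce when REDASH_REDIS_URL is unset: the first
# assignment ends in /5, and the second reads that value back and swaps /5 for /6.
os.environ["REDASH_REDIS_URL"] = os.environ.get("REDASH_REDIS_URL", "redis://localhost:6379/0").replace("/0", "/5")
os.environ["RQ_REDIS_URL"] = os.environ.get("REDASH_REDIS_URL", "redis://localhost:6379/0").replace("/5", "/6")

print(os.environ["REDASH_REDIS_URL"])  # redis://localhost:6379/5
print(os.environ["RQ_REDIS_URL"])      # redis://localhost:6379/6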