From 8d292fcac1eabbe71fea36807237ff57e2fb4826 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 18:47:45 -0500 Subject: [PATCH 01/17] Add boilerplate/docker-compose infrastructure from neuroscout --- .env.example | 3 + .travis.yml | 44 +++++++++ .zenodo.json | 27 ++++++ README.md | 46 +++++++++ codecov.yml | 3 + docker-compose.dev.yml | 13 +++ docker-compose.yml | 52 ++++++++++ manage.py | 94 +++++++++++++++++++ neurostuff/Dockerfile | 14 +++ neurostuff/lib64 | 1 + neurostuff/requirements.txt | 51 ++++++++++ nginx/Dockerfile | 3 + nginx/nginx.conf | 77 +++++++++++++++ nginx/nginx.dev.conf | 77 +++++++++++++++ nginx/sites-enabled/flask_project | 68 ++++++++++++++ nginx/sites-enabled/ssl_config | 15 +++ postgres/Dockerfile | 11 +++ postgres/migrations/migrations/README | 1 + postgres/migrations/migrations/alembic.ini | 45 +++++++++ postgres/migrations/migrations/env.py | 87 +++++++++++++++++ postgres/migrations/migrations/script.py.mako | 24 +++++ postgres/pg_dump-to-s3/README.md | 28 ++++++ postgres/pg_dump-to-s3/backup.txt | 1 + postgres/pg_dump-to-s3/pg_dump-to-s3.sh | 37 ++++++++ postgres/pg_dump-to-s3/s3-autodelete.sh | 27 ++++++ 25 files changed, 849 insertions(+) create mode 100644 .env.example create mode 100644 .travis.yml create mode 100644 .zenodo.json create mode 100644 README.md create mode 100644 codecov.yml create mode 100644 docker-compose.dev.yml create mode 100644 docker-compose.yml create mode 100644 manage.py create mode 100644 neurostuff/Dockerfile create mode 120000 neurostuff/lib64 create mode 100644 neurostuff/requirements.txt create mode 100644 nginx/Dockerfile create mode 100644 nginx/nginx.conf create mode 100644 nginx/nginx.dev.conf create mode 100644 nginx/sites-enabled/flask_project create mode 100644 nginx/sites-enabled/ssl_config create mode 100644 postgres/Dockerfile create mode 100644 postgres/migrations/migrations/README create mode 100644 postgres/migrations/migrations/alembic.ini create mode 100644 
postgres/migrations/migrations/env.py create mode 100644 postgres/migrations/migrations/script.py.mako create mode 100644 postgres/pg_dump-to-s3/README.md create mode 100644 postgres/pg_dump-to-s3/backup.txt create mode 100644 postgres/pg_dump-to-s3/pg_dump-to-s3.sh create mode 100644 postgres/pg_dump-to-s3/s3-autodelete.sh diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..76cd0ac7d --- /dev/null +++ b/.env.example @@ -0,0 +1,3 @@ +APP_SETTINGS=neurostuff.config.app.ProductionConfig +COMPOSE_CONVERT_WINDOWS_PATHS=1 +POSTGRESS_PASSWORD=something diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..83582e822 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,44 @@ +matrix: + include: + - language: python + python: + - "3.6" + install: + - pip install -U setuptools==45 + - pip install -r neuroscout/requirements.txt + - pip install -e git+https://github.com/PsychoinformaticsLab/pliers.git#egg=pliers + - pip install pytest-cov + script: + - python -m pytest neuroscout/tests/ --cov=./ + # cd frontend && npm test + env: + - APP_SETTINGS=neuroscout.config.app.TravisConfig PYTHONHASHSEED=0 + addons: + postgresql: "9.4" + before_script: + - cp neuroscout/config/example_app.py neuroscout/config/app.py + - cp neuroscout/frontend/src/config.ts.example neuroscout/frontend/src/config.ts + - psql -c 'create database travis_ci_test;' -U postgres + # CI=""; cd neuroscout/frontend && yarn install --ignore-engines --concurrency=1 && yarn build --concurrency=1 + - CI="true" + - python -m pliers.support.download + before_install: + - sudo apt-get install libavformat-dev libavfilter-dev libavdevice-dev ffmpeg + - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) + - sudo apt-get -qq update + - sudo apt-get install -yq git-annex-standalone tesseract-ocr + + after_success: + - bash <(curl -s https://codecov.io/bash) + + - language: node_js + node_js: + - "lts/*" + cache: + yarn: true + install: + - cd ./neuroscout/frontend 
+ - yarn install + script: + - mv ./src/config.ts.example ./src/config.ts + - yarn build diff --git a/.zenodo.json b/.zenodo.json new file mode 100644 index 000000000..af35c5949 --- /dev/null +++ b/.zenodo.json @@ -0,0 +1,27 @@ +{ + "creators": [ + { + "affiliation": "University of Texas at Austin", + "name": "De La Vega, Alejandro", + "orcid": "0000-0001-9062-3778" + }, + { + "affiliation": "Stanford University", + "name": "Blair, Ross", + "orcid": "0000-0003-3007-1056" + }, + { + "affiliation": "University of Texas at Austin", + "name": "Yarkoni, Tal", + "orcid": "0000-0002-6558-5113" + } + ], + "keywords": [ + "neuroimaging", + "naturalstic", + "BIDS", + "fMRI" + ], + "license": "bsd-3-clause", + "upload_type": "software" +} diff --git a/README.md b/README.md new file mode 100644 index 000000000..5c3f134eb --- /dev/null +++ b/README.md @@ -0,0 +1,46 @@ +# neurostuff + +[Add badges] + +Requirements: Docker and docker-compose. + +## Configuration +First, set up the main environment variables in `.env` (see: `.env.example`). + +Next, set up the Flask server's environment variables .... + +Finally, set up the frontend's env variables by .... + +## Initalizing backend +Build the containers and start services using the development configuration: + + docker-compose build + docker-compose -f docker-compose.yml -f docker-compose.dev.yml up -d + +The server should now be running at http://localhost/ + +Next, initialize, migrate and upgrade the database migrations. + + docker-compose exec neurostuff bash + rm -rf /migrations/migrations + python manage.py db init + python manage.py db migrate + python manage.py db upgrade + python manage.py add_user useremail password + +## Setting up front end + + +## Ingesting + +## Maintaining docker image and db +If you make a change to /neurostuff, you should be able to simply restart the server. 
+ + docker-compose restart neurostuff + +If you need to upgrade the db after changing any models: + + docker-compose exec neurostuff python manage.py db migrate + docker-compose exec neurostuff python manage.py db upgrade + +To inspect the database using psql: diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..53b152400 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,3 @@ +coverage: + ignore: + - "neurostuff/tests/*" diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 000000000..a6efbdd6f --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,13 @@ +version: "2" +services: + nginx: + volumes: + - ./nginx/nginx.dev.conf:/etc/nginx/nginx.conf:ro + + swagger-ui: + environment: + - "API_URL=http://localhost/swagger/" + + neurostuff: + command: /usr/local/bin/gunicorn -w 2 -b :8000 neurostuff.core:app --log-level debug --timeout 120 --reload + restart: "no" diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..53332765f --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,52 @@ +version: "2" +services: + neurostuff: + restart: always + build: ./neurostuff + expose: + - "8000" + volumes: + - ./postgres/migrations:/migrations + - ./:/neurostuff + command: /usr/local/bin/gunicorn -w 2 -b :8000 neurostuff.core:app --log-level debug --timeout 120 + env_file: + - .env + + nginx: + restart: always + build: ./nginx + ports: + - "80:80" + - "443:443" + volumes: + - ./nginx/certs:/etc/letsencrypt + - ./nginx/certs-data:/data/letsencrypt + - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro + - ./nginx/sites-enabled:/etc/nginx/sites-enabled:ro + volumes_from: + - neurostuff + + postgres: + restart: always + build: ./postgres + volumes: + - postgres_data:/var/lib/postgresql/data + expose: + - '5432' + environment: + - POSTGRES_DB=neurostuff + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + env_file: + - .env + + swagger-ui: + image: swaggerapi/swagger-ui + environment: + - 
"API_URL=http://localhost/swagger/" + expose: + - '8080' + +volumes: + certs: + certs-data: + postgres_data: diff --git a/manage.py b/manage.py new file mode 100644 index 000000000..401b359f8 --- /dev/null +++ b/manage.py @@ -0,0 +1,94 @@ +""" + Command line management tools. +""" +import os +import requests +import json +import datetime + +from flask_script import Manager, Shell +from flask_migrate import Migrate, MigrateCommand +from flask_security.utils import encrypt_password + +from neuroscout import populate +from neuroscout.core import app, db +from neuroscout.models import user_datastore + +app.config.from_object(os.environ['APP_SETTINGS']) +migrate = Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) +manager = Manager(app) + + +def _make_context(): + from neuroscout import models + from neuroscout.tests.request_utils import Client + from neuroscout import resources + + try: + client = Client(requests, 'http://127.0.0.1:80', + username='test2@test.com', password='password') + except: + client = None + + return dict(app=app, db=db, ms=models, client=client, + resources=resources) + + +manager.add_command('db', MigrateCommand) +manager.add_command("shell", Shell(make_context=_make_context)) + + +@manager.command +def add_user(email, password, confirm=True): + """ Add a user to the database. + email - A valid email address (primary login key) + password - Any string + """ + user = user_datastore.create_user( + email=email, password=encrypt_password(password)) + if confirm: + user.confirmed_at = datetime.datetime.now() + db.session.commit() + + +@manager.command +def add_task(local_path, task, include_predictors=None, + exclude_predictors=None, filters='{}', reingest=False): + """ Add BIDS dataset to database. + local_path - Path to local_path directory + task - Task name + include_predictors - Set of predictors to ingest. "None" ingests all. 
+ filters - string JSON object with optional run filters + """ + populate.add_task( + task, local_path=local_path, **json.loads(filters), + include_predictors=include_predictors, + exclude_predictors=exclude_predictors, + reingest=reingest) + + +@manager.command +def extract_features(local_path, task, graph_spec, filters='{}'): + """ Extract features from a BIDS dataset. + local_path - Path to bids directory + task - Task name + graph_spec - Path to JSON pliers graph spec + filters - string JSON object with optional run filters + """ + populate.extract_features( + local_path, task, graph_spec, **json.loads(filters)) + + +@manager.command +def ingest_from_json(config_file, update_features=False, reingest=False): + """ Ingest/update datasets and extracted features from a json config file. + config_file - json config file detailing datasets and pliers graph_json + automagic - Force enable datalad automagic + """ + populate.ingest_from_json( + config_file, update_features=update_features, + reingest=reingest) + + +if __name__ == '__main__': + manager.run() diff --git a/neurostuff/Dockerfile b/neurostuff/Dockerfile new file mode 100644 index 000000000..f98e01fdf --- /dev/null +++ b/neurostuff/Dockerfile @@ -0,0 +1,14 @@ +FROM python:3.6-stretch +ARG DEBIAN_FRONTEND=noninteractive + +RUN mkdir -p /usr/src/app +WORKDIR /usr/src/app + +RUN apt-get -qq update + +COPY requirements.txt /usr/src/app/ +RUN pip install --no-cache-dir -r requirements.txt + +COPY . 
/usr/src/app + +WORKDIR /neurostuff diff --git a/neurostuff/lib64 b/neurostuff/lib64 new file mode 120000 index 000000000..7951405f8 --- /dev/null +++ b/neurostuff/lib64 @@ -0,0 +1 @@ +lib \ No newline at end of file diff --git a/neurostuff/requirements.txt b/neurostuff/requirements.txt new file mode 100644 index 000000000..e356f1e1b --- /dev/null +++ b/neurostuff/requirements.txt @@ -0,0 +1,51 @@ +aniso8601==8.0.0 +Babel==2.8.0 +blinker==1.4 +cachetools==4.1.1 +certifi==2020.6.20 +chardet==3.0.4 +click==7.1.2 +dnspython==2.0.0 +email-validator==1.1.1 +gunicorn +Flask==1.1.2 +Flask-BabelEx==0.9.4 +Flask-Cors==3.0.8 +Flask-Dance==3.0.0 +Flask-GraphQL==2.0.1 +Flask-Login==0.5.0 +Flask-Mail==0.9.1 +Flask-Principal==0.4.0 +Flask-RESTful==0.3.8 +Flask-Security==3.0.0 +Flask-SQLAlchemy==2.4.4 +Flask-WTF==0.14.3 +frozendict==1.2 +graphql-core==2.3.2 +graphql-server-core==1.2.0 +idna==2.10 +itsdangerous==2.0.0a1 +Jinja2==3.0.0a1 +lxml==4.5.2 +MarkupSafe==2.0.0a1 +marshmallow==3.7.1 +numpy==1.19.1 +oauthlib==3.1.0 +pandas==1.1.1 +passlib==1.7.2 +promise==2.3 +PyLD==2.0.3 +python-dateutil==2.8.1 +pytz==2020.1 +requests==2.24.0 +requests-oauthlib==1.3.0 +Rx==1.6.1 +shortuuid==1.0.1 +six==1.15.0 +speaklater==1.3 +SQLAlchemy==1.3.19 +SQLAlchemy-Utils==0.36.8 +urllib3==1.25.10 +URLObject==2.4.3 +Werkzeug==1.0.1 +WTForms==2.3.3 diff --git a/nginx/Dockerfile b/nginx/Dockerfile new file mode 100644 index 000000000..e81a10a20 --- /dev/null +++ b/nginx/Dockerfile @@ -0,0 +1,3 @@ +FROM nginx +RUN apt-get update && apt-get install -y openssl +RUN openssl dhparam -out /etc/nginx/dhparam.pem 2048 diff --git a/nginx/nginx.conf b/nginx/nginx.conf new file mode 100644 index 000000000..ab5f8659f --- /dev/null +++ b/nginx/nginx.conf @@ -0,0 +1,77 @@ +user www-data; +worker_processes 4; +pid /run/nginx.pid; + +events { + worker_connections 768; + # multi_accept on; +} + +http { + + ## + # Basic Settings + ## + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + 
types_hash_max_size 2048; + # server_tokens off; + + # server_names_hash_bucket_size 64; + # server_name_in_redirect off; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + ## + # Logging Settings + ## + + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; + + ## + # Gzip Settings + ## + + gzip on; + gzip_disable "msie6"; + + gzip_vary on; + gzip_proxied any; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + + ## + # nginx-naxsi config + ## + # Uncomment it if you installed nginx-naxsi + ## + + #include /etc/nginx/naxsi_core.rules; + + ## + # nginx-passenger config + ## + # Uncomment it if you installed nginx-passenger + ## + + #passenger_root /usr; + #passenger_ruby /usr/bin/ruby; + + ## + # Virtual Host Configs + ## + + ## + # No modular confs in use + ## + # include /etc/nginx/conf.d/*.conf; + + include /etc/nginx/sites-enabled/*; +} diff --git a/nginx/nginx.dev.conf b/nginx/nginx.dev.conf new file mode 100644 index 000000000..52a75dff5 --- /dev/null +++ b/nginx/nginx.dev.conf @@ -0,0 +1,77 @@ +user www-data; +worker_processes 4; +pid /run/nginx.pid; + +events { + worker_connections 768; + # multi_accept on; +} + +http { + + ## + # Basic Settings + ## + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + # server_tokens off; + + # server_names_hash_bucket_size 64; + # server_name_in_redirect off; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + ## + # Logging Settings + ## + + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; + + ## + # Gzip Settings + ## + + gzip on; + gzip_disable "msie6"; + + gzip_vary on; + gzip_proxied any; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + gzip_types text/plain text/css application/json 
application/x-javascript text/xml application/xml application/xml+rss text/javascript; + + server { + server_name _; + root /neurostuff/neurostuff/frontend/build; + + location /static/ { + } + + location = /api/ { + return 301 /api/swagger/; + } + + location /api/swagger/ { + proxy_pass http://swagger-ui:8080/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + proxy_pass http://neurostuff:8000; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_connect_timeout 75s; + proxy_read_timeout 300s; + } + } +} diff --git a/nginx/sites-enabled/flask_project b/nginx/sites-enabled/flask_project new file mode 100644 index 000000000..d33723728 --- /dev/null +++ b/nginx/sites-enabled/flask_project @@ -0,0 +1,68 @@ +server { + server_name www.neurostuff.org; + charset utf-8; + listen 443 ssl http2; + listen [::]:443 ssl http2; + + ssl_certificate /etc/letsencrypt/live/neurostuff.org/fullchain.pem; + ssl_certificate_key /etc/letsencrypt/live/neurostuff.org/privkey.pem; + ssl_trusted_certificate /etc/letsencrypt/live/neurostuff.org/chain.pem; + + return 301 $scheme://neurostuff.org$request_uri; +} + +server { + + server_name neurostuff.org; + charset utf-8; + listen 443 ssl http2; + listen [::]:443 ssl http2; + + root /neurostuff/neurostuff/frontend/build; + + ssl_certificate /etc/letsencrypt/live/neurostuff.org/fullchain.pem; + ssl_certificate_key /etc/letsencrypt/live/neurostuff.org//privkey.pem; + ssl_trusted_certificate /etc/letsencrypt/live/neurostuff.org//chain.pem; + + ssl_protocols TLSv1.2;# Requires nginx >= 1.13.0 else use TLSv1.2 + ssl_prefer_server_ciphers on; + ssl_dhparam /etc/nginx/dhparam.pem; # openssl dhparam -out /etc/nginx/dhparam.pem 4096 + ssl_ciphers 
ECDHE-RSA-AES256-GCM-SHA512:DHE-RSA-AES256-GCM-SHA512:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384; + ssl_ecdh_curve secp384r1; # Requires nginx >= 1.1.0 + ssl_session_timeout 10m; + ssl_session_cache shared:SSL:10m; + ssl_session_tickets off; # Requires nginx >= 1.5.9 + ssl_stapling on; # Requires nginx >= 1.3.7 + ssl_stapling_verify on; # Requires nginx => 1.3.7 + resolver_timeout 5s; + add_header Strict-Transport-Security "max-age=63072000; includeSubDomains; preload"; + add_header X-Frame-Options DENY; + add_header X-Content-Type-Options nosniff; + add_header X-XSS-Protection "1; mode=block"; + + + location /static/ { + } + + location = /api/ { + return 301 https://$host/api/swagger/; + } + + location /api/swagger/ { + proxy_pass http://swagger-ui:8080/; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + + location / { + proxy_pass http://neurostuff:8000; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_connect_timeout 75s; + proxy_read_timeout 300s; + client_max_body_size 100M; + } +} diff --git a/nginx/sites-enabled/ssl_config b/nginx/sites-enabled/ssl_config new file mode 100644 index 000000000..00bc87710 --- /dev/null +++ b/nginx/sites-enabled/ssl_config @@ -0,0 +1,15 @@ +server { + server_name neurostuff.org www.neurostuff.org; + + location /.well-known { + allow all; + root /data/letsencrypt/; + } + + location / { + return 301 https://$host$request_uri; + } + + listen 80; + +} diff --git a/postgres/Dockerfile b/postgres/Dockerfile new file mode 100644 index 000000000..9b0c655a9 --- /dev/null +++ b/postgres/Dockerfile @@ -0,0 +1,11 @@ +FROM postgres:12 +RUN apt-get update && apt-get install -y dos2unix +RUN apt-get install -yq python-pip python-dev build-essential +RUN apt-get install -yq cron +RUN pip install awscli +COPY pg_dump-to-s3 
/home +RUN chmod +x /home/pg_dump-to-s3.sh /home/s3-autodelete.sh +RUN crontab /home/backup.txt +RUN service cron start +RUN dos2unix /home/pg_dump-to-s3.sh +RUN dos2unix /home/s3-autodelete.sh diff --git a/postgres/migrations/migrations/README b/postgres/migrations/migrations/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/postgres/migrations/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/postgres/migrations/migrations/alembic.ini b/postgres/migrations/migrations/alembic.ini new file mode 100644 index 000000000..f8ed4801f --- /dev/null +++ b/postgres/migrations/migrations/alembic.ini @@ -0,0 +1,45 @@ +# A generic, single database configuration. + +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/postgres/migrations/migrations/env.py b/postgres/migrations/migrations/env.py new file mode 100644 index 000000000..459381606 --- /dev/null +++ b/postgres/migrations/migrations/env.py @@ -0,0 +1,87 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig +import logging + +# this is the Alembic Config object, which provides +# access to the values within the .ini 
file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from flask import current_app +config.set_main_option('sqlalchemy.url', + current_app.config.get('SQLALCHEMY_DATABASE_URI')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure(url=url) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.readthedocs.org/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + engine = engine_from_config(config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + connection = engine.connect() + context.configure(connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args) + + try: + with context.begin_transaction(): + context.run_migrations() + finally: + connection.close() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/postgres/migrations/migrations/script.py.mako b/postgres/migrations/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/postgres/migrations/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/postgres/pg_dump-to-s3/README.md b/postgres/pg_dump-to-s3/README.md new file mode 100644 index 000000000..26503c858 --- /dev/null +++ b/postgres/pg_dump-to-s3/README.md @@ -0,0 +1,28 @@ +# pg_dump-to-s3 +Automatically dump and archive PostgreSQL backups to Amazon S3 + +## Requirements + + - [AWS cli](https://aws.amazon.com/cli) + +## Setup + +Edit pg_to_s3.sh and replace: + - PG_HOST and PG_USER with your PostgreSQL hosts and backup user. + - S3_PATH with your Amazon S3 bucket and path + +## Usage + +``` +./pg_to_s3.sh database1 database2 database3 [...] +``` + +## Credentials + +### AWS credentials + +AWS credentials should be stored in a file called `~/.aws`. A documentation is available here: http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html + +### PostgreSQL password + +The PostgreSQL password can be stored in a file called `~/.pgpass`, see: https://www.postgresql.org/docs/current/static/libpq-pgpass.html diff --git a/postgres/pg_dump-to-s3/backup.txt b/postgres/pg_dump-to-s3/backup.txt new file mode 100644 index 000000000..d8da4730b --- /dev/null +++ b/postgres/pg_dump-to-s3/backup.txt @@ -0,0 +1 @@ +0 0 25 * * /bin/bash /home/pg_dump-to-s3.sh neurostuff >> /var/log/pg_dump.log 2> /var/log/pg_dump.log diff --git a/postgres/pg_dump-to-s3/pg_dump-to-s3.sh b/postgres/pg_dump-to-s3/pg_dump-to-s3.sh new file mode 100644 index 000000000..a91f0f468 --- /dev/null +++ b/postgres/pg_dump-to-s3/pg_dump-to-s3.sh @@ -0,0 +1,37 @@ +#!/bin/bash +PATH=/usr/lib/postgresql/12/bin/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + +set -e + +# Database credentials +PG_HOST="postgres" +PG_USER="postgres" + + +# S3 +S3_PATH="neurostuff-backup" + +# get databases 
list +dbs=("$@") + +# Vars +NOW=$(date +"%m-%d-%Y-at-%H-%M-%S") +DIR=/home + +for db in "${dbs[@]}"; do + # Dump database + pg_dump -Fc -h $PG_HOST -U $PG_USER $db > /tmp/"$NOW"_"$db".dump + + # Copy to S3 + aws s3 cp /tmp/"$NOW"_"$db".dump s3://$S3_PATH/"$NOW"_"$db".dump --storage-class STANDARD_IA + + # Delete local file + rm /tmp/"$NOW"_"$db".dump + + # Log + echo "* Database $db is archived" +done + +# Delere old files +echo "* Delete old backups"; +$DIR/s3-autodelete.sh $S3_PATH "7 days" diff --git a/postgres/pg_dump-to-s3/s3-autodelete.sh b/postgres/pg_dump-to-s3/s3-autodelete.sh new file mode 100644 index 000000000..ffc2659e3 --- /dev/null +++ b/postgres/pg_dump-to-s3/s3-autodelete.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# Usage: +# ./s3-autodelete.sh bucket/path "7 days" + +set -e + +# Maximum date (will delete all files older than this date) +maxDate=`date +%s --date="-$2"` + +# Loop thru files +aws s3 ls s3://$1/ | while read -r line; do + # Get file creation date + createDate=`echo $line|awk {'print $1" "$2'}` + createDate=`date -d"$createDate" +%s` + + if [[ $createDate -lt $maxDate ]] + then + # Get file name + fileName=`echo $line|awk {'print $4'}` + if [[ $fileName != "" ]] + then + echo "* Delete $fileName"; + aws s3 rm s3://$1/$fileName + fi + fi +done; From b0a74fa3fd1d2f8b8f0286115ddbabdc4b79383e Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 18:49:54 -0500 Subject: [PATCH 02/17] Update zenodo --- .zenodo.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index af35c5949..7408ddbec 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -5,16 +5,16 @@ "name": "De La Vega, Alejandro", "orcid": "0000-0001-9062-3778" }, - { - "affiliation": "Stanford University", - "name": "Blair, Ross", - "orcid": "0000-0003-3007-1056" - }, { "affiliation": "University of Texas at Austin", "name": "Yarkoni, Tal", "orcid": "0000-0002-6558-5113" - } + }, + { + "affiliation": "University of Texas at 
Austin", + "name": "De La Vega, Alejandro", + "orcid": "0000-0001-9062-3778" + }, ], "keywords": [ "neuroimaging", From 98605c8da53c043e92c3b996278c105cd2ed962b Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 18:51:57 -0500 Subject: [PATCH 03/17] Simplify travis CI file --- .travis.yml | 27 ++------------------------- 1 file changed, 2 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index 83582e822..9bc4243fa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,40 +5,17 @@ matrix: - "3.6" install: - pip install -U setuptools==45 - - pip install -r neuroscout/requirements.txt - - pip install -e git+https://github.com/PsychoinformaticsLab/pliers.git#egg=pliers + - pip install -r neurostuff/requirements.txt - pip install pytest-cov script: - - python -m pytest neuroscout/tests/ --cov=./ - # cd frontend && npm test + - python -m pytest neurostuff/ --cov=./ env: - APP_SETTINGS=neuroscout.config.app.TravisConfig PYTHONHASHSEED=0 addons: postgresql: "9.4" before_script: - cp neuroscout/config/example_app.py neuroscout/config/app.py - - cp neuroscout/frontend/src/config.ts.example neuroscout/frontend/src/config.ts - psql -c 'create database travis_ci_test;' -U postgres - # CI=""; cd neuroscout/frontend && yarn install --ignore-engines --concurrency=1 && yarn build --concurrency=1 - CI="true" - - python -m pliers.support.download - before_install: - - sudo apt-get install libavformat-dev libavfilter-dev libavdevice-dev ffmpeg - - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - - sudo apt-get -qq update - - sudo apt-get install -yq git-annex-standalone tesseract-ocr - after_success: - bash <(curl -s https://codecov.io/bash) - - - language: node_js - node_js: - - "lts/*" - cache: - yarn: true - install: - - cd ./neuroscout/frontend - - yarn install - script: - - mv ./src/config.ts.example ./src/config.ts - - yarn build From ab922636d50f71b791eb06a458272750377ed998 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 
27 Aug 2020 18:55:10 -0500 Subject: [PATCH 04/17] Add config file --- .travis.yml | 4 ++-- neurostuff/config/example_app.py | 35 ++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 neurostuff/config/example_app.py diff --git a/.travis.yml b/.travis.yml index 9bc4243fa..baabb64fc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,11 +10,11 @@ matrix: script: - python -m pytest neurostuff/ --cov=./ env: - - APP_SETTINGS=neuroscout.config.app.TravisConfig PYTHONHASHSEED=0 + - APP_SETTINGS=neurostuff.config.app.TravisConfig PYTHONHASHSEED=0 addons: postgresql: "9.4" before_script: - - cp neuroscout/config/example_app.py neuroscout/config/app.py + - cp neurostuff/config/example_app.py neurostuff/config/app.py - psql -c 'create database travis_ci_test;' -U postgres - CI="true" after_success: diff --git a/neurostuff/config/example_app.py b/neurostuff/config/example_app.py new file mode 100644 index 000000000..cbea2c764 --- /dev/null +++ b/neurostuff/config/example_app.py @@ -0,0 +1,35 @@ +""" This is an EXAMPLE config file + Rename this file to app.py and set variables +""" + + +class Config(object): + SERVER_NAME = 'localhost' # Set to external server name in production + + MIGRATIONS_DIR = '/migrations/migrations' + SQLALCHEMY_TRACK_MODIFICATIONS = False + WTF_CSRF_ENABLED = False + + SQLALCHEMY_DATABASE_URI = 'postgres://postgres:password@postgres:5432/neurostuff' + PROPAGATE_EXCEPTIONS = True + + +class ProductionConfig(Config): + ENV = 'production' + + +class DevelopmentConfig(Config): + ENV = 'development' + + +class TestingConfig(Config): + ENV = 'testing' + TESTING = True + + +class DockerTestConfig(TestingConfig): + SQLALCHEMY_DATABASE_URI = 'postgres://postgres@postgres:5432/scout_test' + + +class TravisConfig(TestingConfig): + SQLALCHEMY_DATABASE_URI = "postgresql://postgres@localhost/travis_ci_test" From a71deb0ac84f02161dcfa52d4c0e513c20dcb6c6 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 
19:30:06 -0500 Subject: [PATCH 05/17] Add config to git ignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index b4b77d0f2..81ff017a3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ historical/ +neurostuff/config.py # # Byte-compiled / optimized / DLL files From 90b5b101e34a5bf3a7b87e3f53faf1f990e6ca40 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 19:43:59 -0500 Subject: [PATCH 06/17] Set up config.py --- .env.example | 3 +-- .travis.yml | 2 +- neurostuff/core.py | 27 ++++++++----------- .../example_app.py => example_config.py} | 8 ++++++ neurostuff/requirements.txt | 1 + 5 files changed, 22 insertions(+), 19 deletions(-) rename neurostuff/{config/example_app.py => example_config.py} (79%) diff --git a/.env.example b/.env.example index 76cd0ac7d..2b986d859 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,2 @@ -APP_SETTINGS=neurostuff.config.app.ProductionConfig +APP_SETTINGS=neuroscout.config.DevelopmentConfig COMPOSE_CONVERT_WINDOWS_PATHS=1 -POSTGRESS_PASSWORD=something diff --git a/.travis.yml b/.travis.yml index baabb64fc..610210630 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,7 @@ matrix: addons: postgresql: "9.4" before_script: - - cp neurostuff/config/example_app.py neurostuff/config/app.py + - cp neurostuff/example_config.py neurostuff/config.py - psql -c 'create database travis_ci_test;' -U postgres - CI="true" after_success: diff --git a/neurostuff/core.py b/neurostuff/core.py index 8d4acc20e..0ad830ff8 100644 --- a/neurostuff/core.py +++ b/neurostuff/core.py @@ -1,36 +1,32 @@ +import os from flask import Flask from flask_security import Security, SQLAlchemyUserDatastore from flask_dance.consumer.storage.sqla import SQLAlchemyStorage -from flask_dance.contrib.github import make_github_blueprint, github +from flask_dance.contrib.github import make_github_blueprint from flask_cors import CORS +# from . 
import oauth +from .resources import bind_resources from .database import init_db from .models import User, Role, OAuth app = Flask(__name__) - -# enable CORS for development -CORS(app, expose_headers='X-Total-Count') - -# Move this stuff out when it gets big -app.debug = True -app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///development.db' -app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False -app.config['APISPEC_SWAGGER_URL'] = '/api/swagger.json' -app.config['APISPEC_SWAGGER_UI_URL'] = '/api/' +app.config.from_object(os.environ['APP_SETTINGS']) db = init_db(app) +# Enable CORS +cors = CORS(app, expose_headers='X-Total-Count') + # Flask-Security user_datastore = SQLAlchemyUserDatastore(db, User, Role) security = Security(app, user_datastore) # Flask-Dance (OAuth) -from . import oauth -app.secret_key = "temporary" +app.secret_key = app.config['DANCE_SECRET_KEY'] blueprint = make_github_blueprint( - client_id="d5372fa09c97d5a98a84", - client_secret="dee86c2c9344f00a31d83854eb135e94957ac494", + client_id=app.config['GITHUB_CLIENT_ID'], + client_secret=app.config['GITHUB_CLIENT_SECRET'], ) app.register_blueprint(blueprint, url_prefix="/login") blueprint.storage = SQLAlchemyStorage(OAuth, db.session) @@ -43,5 +39,4 @@ # context_value={'session': db.session})) # Bind routes -from .resources import bind_resources bind_resources(app) diff --git a/neurostuff/config/example_app.py b/neurostuff/example_config.py similarity index 79% rename from neurostuff/config/example_app.py rename to neurostuff/example_config.py index cbea2c764..8e115e9d7 100644 --- a/neurostuff/config/example_app.py +++ b/neurostuff/example_config.py @@ -13,6 +13,13 @@ class Config(object): SQLALCHEMY_DATABASE_URI = 'postgres://postgres:password@postgres:5432/neurostuff' PROPAGATE_EXCEPTIONS = True + APISPEC_SWAGGER_URL = '/api/swagger.json' + APISPEC_SWAGGER_UI_URL = '/api/' + + GITHUB_CLIENT_ID = "github-id" + GITHUB_CLIENT_SECRET = "github-secret" + DANCE_SECRET_KEY = "temporary" + class 
ProductionConfig(Config): ENV = 'production' @@ -20,6 +27,7 @@ class ProductionConfig(Config): class DevelopmentConfig(Config): ENV = 'development' + DEBUG = True class TestingConfig(Config): diff --git a/neurostuff/requirements.txt b/neurostuff/requirements.txt index e356f1e1b..738530c42 100644 --- a/neurostuff/requirements.txt +++ b/neurostuff/requirements.txt @@ -49,3 +49,4 @@ urllib3==1.25.10 URLObject==2.4.3 Werkzeug==1.0.1 WTForms==2.3.3 +wrapt==1.11.2 From 68723e8691abbc385d10f802d7e9fe66f000148e Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 20:12:31 -0500 Subject: [PATCH 07/17] Set up flask-manage --- .env.example | 3 +- manage.py | 66 ++----------------- neurostuff/core.py | 4 +- neurostuff/example_config.py | 6 +- neurostuff/requirements.txt | 4 ++ postgres/migrations/migrations/README | 0 postgres/migrations/migrations/env.py | 0 postgres/migrations/migrations/script.py.mako | 0 scripts/ingest_data.py | 3 +- 9 files changed, 20 insertions(+), 66 deletions(-) mode change 100644 => 100755 postgres/migrations/migrations/README mode change 100644 => 100755 postgres/migrations/migrations/env.py mode change 100644 => 100755 postgres/migrations/migrations/script.py.mako diff --git a/.env.example b/.env.example index 2b986d859..7d2eb0a15 100644 --- a/.env.example +++ b/.env.example @@ -1,2 +1,3 @@ -APP_SETTINGS=neuroscout.config.DevelopmentConfig +APP_SETTINGS=neurostuff.config.DevelopmentConfig COMPOSE_CONVERT_WINDOWS_PATHS=1 +POSTGRES_PASSWORD=example diff --git a/manage.py b/manage.py index 401b359f8..266b2303a 100644 --- a/manage.py +++ b/manage.py @@ -2,17 +2,12 @@ Command line management tools. 
""" import os -import requests -import json -import datetime from flask_script import Manager, Shell from flask_migrate import Migrate, MigrateCommand from flask_security.utils import encrypt_password -from neuroscout import populate -from neuroscout.core import app, db -from neuroscout.models import user_datastore +from neurostuff.core import app, db, user_datastore app.config.from_object(os.environ['APP_SETTINGS']) migrate = Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) @@ -20,18 +15,9 @@ def _make_context(): - from neuroscout import models - from neuroscout.tests.request_utils import Client - from neuroscout import resources + from neurostuff import models - try: - client = Client(requests, 'http://127.0.0.1:80', - username='test2@test.com', password='password') - except: - client = None - - return dict(app=app, db=db, ms=models, client=client, - resources=resources) + return dict(app=app, db=db, ms=models) manager.add_command('db', MigrateCommand) @@ -39,55 +25,15 @@ def _make_context(): @manager.command -def add_user(email, password, confirm=True): +def add_user(email, password): """ Add a user to the database. email - A valid email address (primary login key) password - Any string """ - user = user_datastore.create_user( + user_datastore.create_user( email=email, password=encrypt_password(password)) - if confirm: - user.confirmed_at = datetime.datetime.now() - db.session.commit() - - -@manager.command -def add_task(local_path, task, include_predictors=None, - exclude_predictors=None, filters='{}', reingest=False): - """ Add BIDS dataset to database. - local_path - Path to local_path directory - task - Task name - include_predictors - Set of predictors to ingest. "None" ingests all. 
- filters - string JSON object with optional run filters - """ - populate.add_task( - task, local_path=local_path, **json.loads(filters), - include_predictors=include_predictors, - exclude_predictors=exclude_predictors, - reingest=reingest) - -@manager.command -def extract_features(local_path, task, graph_spec, filters='{}'): - """ Extract features from a BIDS dataset. - local_path - Path to bids directory - task - Task name - graph_spec - Path to JSON pliers graph spec - filters - string JSON object with optional run filters - """ - populate.extract_features( - local_path, task, graph_spec, **json.loads(filters)) - - -@manager.command -def ingest_from_json(config_file, update_features=False, reingest=False): - """ Ingest/update datasets and extracted features from a json config file. - config_file - json config file detailing datasets and pliers graph_json - automagic - Force enable datalad automagic - """ - populate.ingest_from_json( - config_file, update_features=update_features, - reingest=reingest) + db.session.commit() if __name__ == '__main__': diff --git a/neurostuff/core.py b/neurostuff/core.py index 0ad830ff8..d4cdbf920 100644 --- a/neurostuff/core.py +++ b/neurostuff/core.py @@ -5,8 +5,6 @@ from flask_dance.contrib.github import make_github_blueprint from flask_cors import CORS -# from . import oauth -from .resources import bind_resources from .database import init_db from .models import User, Role, OAuth @@ -23,6 +21,7 @@ security = Security(app, user_datastore) # Flask-Dance (OAuth) +from . 
import oauth app.secret_key = app.config['DANCE_SECRET_KEY'] blueprint = make_github_blueprint( client_id=app.config['GITHUB_CLIENT_ID'], @@ -39,4 +38,5 @@ # context_value={'session': db.session})) # Bind routes +from .resources import bind_resources bind_resources(app) diff --git a/neurostuff/example_config.py b/neurostuff/example_config.py index 8e115e9d7..9443529b3 100644 --- a/neurostuff/example_config.py +++ b/neurostuff/example_config.py @@ -1,6 +1,7 @@ """ This is an EXAMPLE config file Rename this file to app.py and set variables """ +import os class Config(object): @@ -10,7 +11,10 @@ class Config(object): SQLALCHEMY_TRACK_MODIFICATIONS = False WTF_CSRF_ENABLED = False - SQLALCHEMY_DATABASE_URI = 'postgres://postgres:password@postgres:5432/neurostuff' + POSTGRES_PASSWORD = os.environ['POSTGRES_PASSWORD'] + DB_NAME = 'neurostuff' + SQLALCHEMY_DATABASE_URI = "postgres://postgres:" \ + f"{POSTGRES_PASSWORD}@postgres:5432/{DB_NAME}" PROPAGATE_EXCEPTIONS = True APISPEC_SWAGGER_URL = '/api/swagger.json' diff --git a/neurostuff/requirements.txt b/neurostuff/requirements.txt index 738530c42..9cc490f7e 100644 --- a/neurostuff/requirements.txt +++ b/neurostuff/requirements.txt @@ -8,6 +8,7 @@ click==7.1.2 dnspython==2.0.0 email-validator==1.1.1 gunicorn +ipython==6.2.1 Flask==1.1.2 Flask-BabelEx==0.9.4 Flask-Cors==3.0.8 @@ -20,6 +21,8 @@ Flask-RESTful==0.3.8 Flask-Security==3.0.0 Flask-SQLAlchemy==2.4.4 Flask-WTF==0.14.3 +Flask-Script==2.0.6 +Flask-Migrate==2.0.3 frozendict==1.2 graphql-core==2.3.2 graphql-server-core==1.2.0 @@ -34,6 +37,7 @@ oauthlib==3.1.0 pandas==1.1.1 passlib==1.7.2 promise==2.3 +psycopg2==2.8.3 PyLD==2.0.3 python-dateutil==2.8.1 pytz==2020.1 diff --git a/postgres/migrations/migrations/README b/postgres/migrations/migrations/README old mode 100644 new mode 100755 diff --git a/postgres/migrations/migrations/env.py b/postgres/migrations/migrations/env.py old mode 100644 new mode 100755 diff --git a/postgres/migrations/migrations/script.py.mako 
b/postgres/migrations/migrations/script.py.mako old mode 100644 new mode 100755 diff --git a/scripts/ingest_data.py b/scripts/ingest_data.py index 84ede063a..513b7635e 100644 --- a/scripts/ingest_data.py +++ b/scripts/ingest_data.py @@ -15,7 +15,6 @@ from neurostuff.models import Study, Analysis, Condition, Image, User, Point from neurostuff.core import db, user_datastore - def reset_database(): db.drop_all() db.create_all() @@ -125,4 +124,4 @@ def ingest_neurosynth(max_rows=None): # reset_database() # ingest_neurovault(limit=20) -ingest_neurosynth(1000) \ No newline at end of file +ingest_neurosynth(1000) From 70f2dc1d6e14ee98231f497ddd49dbb35529846d Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 20:55:55 -0500 Subject: [PATCH 08/17] Succesful inital migration with flask_migrate --- neurostuff/models/analysis.py | 37 ++-- neurostuff/models/auth.py | 35 ++-- neurostuff/models/data.py | 127 +++++------ .../migrations/versions/7b7f7b54fb70_.py | 198 ++++++++++++++++++ 4 files changed, 298 insertions(+), 99 deletions(-) create mode 100755 postgres/migrations/migrations/versions/7b7f7b54fb70_.py diff --git a/neurostuff/models/analysis.py b/neurostuff/models/analysis.py index 53bea2793..739863929 100644 --- a/neurostuff/models/analysis.py +++ b/neurostuff/models/analysis.py @@ -1,24 +1,21 @@ from sqlalchemy.ext.associationproxy import association_proxy -from sqlalchemy.orm.collections import attribute_mapped_collection -from sqlalchemy import (Column, Integer, String, Boolean, ForeignKey, JSON, - Table, Float) -from sqlalchemy.orm import reconstructor, relationship, backref +from sqlalchemy.orm import relationship, backref -from .data import Image, Point, BaseMixin +from .data import BaseMixin from ..database import db class MetaAnalysis(BaseMixin, db.Model): __tablename__ = 'metaanalyses' - name = Column(String) - desc = Column(String) - estimator = Column(String) - estimator_options = Column(JSON) - variable_names = Column(JSON) - variable_descs = 
Column(JSON) - data = Column(JSON) - user_id = Column(ForeignKey('users.id'), primary_key=True) + name = db.Column(db.Text) + desc = db.Column(db.Text) + estimator = db.Column(db.Text) + estimator_options = db.Column(db.JSON) + variable_names = db.Column(db.JSON) + variable_descs = db.Column(db.JSON) + data = db.Column(db.JSON) + user_id = db.Column(db.Text, db.ForeignKey('users.id'), primary_key=True) user = relationship('User', backref=backref('metaanalyses')) images = association_proxy('metanalysis_images', 'image') @@ -30,9 +27,10 @@ class MetaAnalysis(BaseMixin, db.Model): class MetaAnalysisImage(db.Model): __tablename__ = 'metaanalysis_images' - weight = Column(Float) - metaanalysis_id = Column(ForeignKey('metaanalyses.id'), primary_key=True) - image_id = Column(ForeignKey('images.id'), primary_key=True) + weight = db.Column(db.Float) + metaanalysis_id = db.Column( + db.Text, db.ForeignKey('metaanalyses.id'), primary_key=True) + image_id = db.Column(db.Text, db.ForeignKey('images.id'), primary_key=True) metaanalysis = relationship('MetaAnalysis', backref=backref('metanalysis_images')) @@ -42,9 +40,10 @@ class MetaAnalysisImage(db.Model): class MetaAnalysisPoint(db.Model): __tablename__ = 'metaanalysis_points' - weight = Column(Float) - metaanalysis_id = Column(ForeignKey('metaanalyses.id'), primary_key=True) - point_id = Column(ForeignKey('points.id'), primary_key=True) + weight = db.Column(db.Float) + metaanalysis_id = db.Column( + db.Text, db.ForeignKey('metaanalyses.id'), primary_key=True) + point_id = db.Column(db.Text, db.ForeignKey('points.id'), primary_key=True) metaanalysis = relationship('MetaAnalysis', backref=backref('metanalysis_points')) diff --git a/neurostuff/models/auth.py b/neurostuff/models/auth.py index 03ec1937e..cc8b9d9d0 100644 --- a/neurostuff/models/auth.py +++ b/neurostuff/models/auth.py @@ -1,9 +1,6 @@ from sqlalchemy.ext.associationproxy import association_proxy -from sqlalchemy.orm.collections import attribute_mapped_collection 
-from sqlalchemy import (Column, Integer, String, Boolean, ForeignKey, JSON, - Table, Float, DateTime) -from sqlalchemy.orm import reconstructor, relationship, backref -from flask_security import UserMixin, RoleMixin, login_required +from sqlalchemy.orm import relationship, backref +from flask_security import UserMixin, RoleMixin from flask_dance.consumer.storage.sqla import OAuthConsumerMixin @@ -11,26 +8,27 @@ from .data import BaseMixin -roles_users = Table('roles_users', db.Model.metadata, - Column('user_id', Integer(), ForeignKey('users.id')), - Column('role_id', Integer(), ForeignKey('roles.id'))) +roles_users = db.Table( + 'roles_users', db.Model.metadata, + db.Column('user_id', db.Text, db.ForeignKey('users.id')), + db.Column('role_id', db.Text, db.ForeignKey('roles.id'))) class Role(BaseMixin, db.Model, RoleMixin): __tablename__ = 'roles' - name = Column(String(80), unique=True) - description = Column(String(255)) + name = db.Column(db.Text, unique=True) + description = db.Column(db.Text) class User(BaseMixin, db.Model, UserMixin): __tablename__ = 'users' - name = Column(String(255)) - email = Column(String(255), unique=True) - password = Column(String(255)) - active = Column(Boolean()) - confirmed_at = Column(DateTime) + name = db.Column(db.Text) + email = db.Column(db.Text, unique=True) + password = db.Column(db.Text) + active = db.Column(db.Boolean) + confirmed_at = db.Column(db.DateTime) roles = relationship('Role', secondary=roles_users, backref=backref('users', lazy='dynamic')) username = association_proxy('oauth', 'provider_user_id') @@ -38,7 +36,8 @@ class User(BaseMixin, db.Model, UserMixin): class OAuth(OAuthConsumerMixin, db.Model): __tablename__ = 'oauth' - user_id = Column(Integer, ForeignKey('users.id')) + + user_id = db.Column(db.Text, db.ForeignKey('users.id')) user = relationship(User, backref=backref('oauth')) - provider_user_id = Column(String(256), unique=True, nullable=False) - provider = Column(String(30)) + provider_user_id = 
db.Column(db.Text, unique=True, nullable=False) + provider = db.Column(db.Text) diff --git a/neurostuff/models/data.py b/neurostuff/models/data.py index c58ed911b..02b5848ab 100644 --- a/neurostuff/models/data.py +++ b/neurostuff/models/data.py @@ -1,8 +1,5 @@ from sqlalchemy.ext.associationproxy import association_proxy -from sqlalchemy.orm.collections import attribute_mapped_collection -from sqlalchemy import (Column, Integer, String, Boolean, ForeignKey, JSON, - Table, Float, DateTime) -from sqlalchemy.orm import reconstructor, relationship, backref +from sqlalchemy.orm import relationship, backref from sqlalchemy.sql import func import shortuuid @@ -15,9 +12,10 @@ def generate_id(): class BaseMixin(object): - id = Column(String(12), primary_key=True, default=generate_id) - created_at = Column(DateTime(timezone=True), server_default=func.now()) - updated_at = Column(DateTime(timezone=True), onupdate=func.now()) + id = db.Column(db.Text, primary_key=True, default=generate_id) + created_at = db.Column( + db.DateTime(timezone=True), server_default=func.now()) + updated_at = db.Column(db.DateTime(timezone=True), onupdate=func.now()) @property def IRI(self): @@ -27,38 +25,38 @@ def IRI(self): class Dataset(BaseMixin, db.Model): __tablename__ = 'datasets' - name = Column(String) - description = Column(String) - publication = Column(String) - doi = Column(String) - pmid = Column(String) - public = Column(Boolean, default=True) - nimads_data = Column(JSON) - user_id = Column(ForeignKey('users.id')) + name = db.Column(db.String) + description = db.Column(db.String) + publication = db.Column(db.String) + doi = db.Column(db.String) + pmid = db.Column(db.String) + public = db.Column(db.Boolean, default=True) + nimads_data = db.Column(db.JSON) + user_id = db.Column(db.Text, db.ForeignKey('users.id')) user = relationship('User', backref=backref('datasets')) class Study(BaseMixin, db.Model): __tablename__ = 'studies' - name = Column(String) - description = Column(String) - 
publication = Column(String) - # source_url = Column(String) - doi = Column(String) - pmid = Column(String) - public = Column(Boolean, default=True) - metadata_ = Column(JSON) - user_id = Column(ForeignKey('users.id')) + name = db.Column(db.String) + description = db.Column(db.String) + publication = db.Column(db.String) + # source_url = db.Column(db.String) + doi = db.Column(db.String) + pmid = db.Column(db.String) + public = db.Column(db.Boolean, default=True) + metadata_ = db.Column(db.JSON) + user_id = db.Column(db.Text, db.ForeignKey('users.id')) user = relationship('User', backref=backref('studies')) class Analysis(BaseMixin, db.Model): __tablename__ = 'analyses' - study_id = Column(ForeignKey('studies.id')) - name = Column(String) - description = Column(String) + study_id = db.Column(db.Text, db.ForeignKey('studies.id')) + name = db.Column(db.String) + description = db.Column(db.String) study = relationship('Study', backref=backref('analyses')) conditions = association_proxy('analysis_conditions', 'condition') weights = association_proxy('analysis_conditions', 'weight') @@ -67,37 +65,42 @@ class Analysis(BaseMixin, db.Model): class Condition(BaseMixin, db.Model): __tablename__ = 'conditions' - name = Column(String) - description = Column(String) + name = db.Column(db.String) + description = db.Column(db.String) class AnalysisConditions(db.Model): __tablename__ = 'analysis_conditions' - weight = Column(Float) - analysis_id = Column(ForeignKey('analyses.id'), primary_key=True) - condition_id = Column(ForeignKey('conditions.id'), primary_key=True) + weight = db.Column(db.Float) + analysis_id = db.Column( + db.Text, db.ForeignKey('analyses.id'), primary_key=True) + condition_id = db.Column( + db.Text, db.ForeignKey('conditions.id'), primary_key=True) analysis = relationship('Analysis', backref=backref('analysis_conditions')) - condition = relationship('Condition', backref=backref('analysis_conditions')) + condition = relationship( + 'Condition', 
backref=backref('analysis_conditions')) -PointEntityMap = Table('point_entities', db.Model.metadata, - Column('point', Integer, ForeignKey('points.id')), - Column('entity', Integer, ForeignKey('entities.id'))) +PointEntityMap = db.Table( + 'point_entities', db.Model.metadata, + db.Column('point', db.Text, db.ForeignKey('points.id')), + db.Column('entity', db.Text, db.ForeignKey('entities.id'))) -ImageEntityMap = Table('image_entities', db.Model.metadata, - Column('image', Integer, ForeignKey('images.id')), - Column('entity', Integer, ForeignKey('entities.id'))) +ImageEntityMap = db.Table( + 'image_entities', db.Model.metadata, + db.Column('image', db.Text, db.ForeignKey('images.id')), + db.Column('entity', db.Text, db.ForeignKey('entities.id'))) class Entity(BaseMixin, db.Model): __tablename__ = 'entities' - study_id = Column(ForeignKey("studies.id")) - label = Column(String) - level = Column(String) - data = Column(JSON) + study_id = db.Column(db.Text, db.ForeignKey("studies.id")) + label = db.Column(db.String) + level = db.Column(db.String) + data = db.Column(db.JSON) study = relationship('Study', backref=backref('entities')) @@ -108,14 +111,14 @@ class Point(BaseMixin, db.Model): def coordinates(self): return [self.x, self.y, self.z] - x = Column(Float) - y = Column(Float) - z = Column(Float) - space = Column(String) - kind = Column(String) - image = Column(String) - label_id = Column(Float, default=None) - analysis_id = Column(ForeignKey('analyses.id')) + x = db.Column(db.Float) + y = db.Column(db.Float) + z = db.Column(db.Float) + space = db.Column(db.String) + kind = db.Column(db.String) + image = db.Column(db.String) + label_id = db.Column(db.Float, default=None) + analysis_id = db.Column(db.Text, db.ForeignKey('analyses.id')) entities = relationship("Entity", secondary=PointEntityMap, backref=backref("points")) @@ -125,13 +128,13 @@ def coordinates(self): class Image(BaseMixin, db.Model): __tablename__ = 'images' - url = Column(String) - filename = 
Column(String) - space = Column(String) - value_type = Column(String) - analysis_id = Column(ForeignKey('analyses.id')) - data = Column(JSON) - add_date = Column(DateTime(timezone=True)) + url = db.Column(db.String) + filename = db.Column(db.String) + space = db.Column(db.String) + value_type = db.Column(db.String) + analysis_id = db.Column(db.Text, db.ForeignKey('analyses.id')) + data = db.Column(db.JSON) + add_date = db.Column(db.DateTime(timezone=True)) analysis_name = association_proxy('analysis', 'name') entities = relationship("Entity", secondary=ImageEntityMap, @@ -142,8 +145,8 @@ class Image(BaseMixin, db.Model): class PointValue(BaseMixin, db.Model): __tablename__ = 'point_values' - point_id = Column(ForeignKey('points.id')) - kind = Column(String) - value = Column(String) - dtype = Column(String, default='str') + point_id = db.Column(db.Text, db.ForeignKey('points.id')) + kind = db.Column(db.String) + value = db.Column(db.String) + dtype = db.Column(db.String, default='str') point = relationship('Point', backref=backref('values')) diff --git a/postgres/migrations/migrations/versions/7b7f7b54fb70_.py b/postgres/migrations/migrations/versions/7b7f7b54fb70_.py new file mode 100755 index 000000000..f2e2b7f86 --- /dev/null +++ b/postgres/migrations/migrations/versions/7b7f7b54fb70_.py @@ -0,0 +1,198 @@ +"""empty message + +Revision ID: 7b7f7b54fb70 +Revises: +Create Date: 2020-08-28 01:54:39.393457 + +""" +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils + +# revision identifiers, used by Alembic. +revision = '7b7f7b54fb70' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('conditions', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('roles', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('name', sa.Text(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name') + ) + op.create_table('users', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('name', sa.Text(), nullable=True), + sa.Column('email', sa.Text(), nullable=True), + sa.Column('password', sa.Text(), nullable=True), + sa.Column('active', sa.Boolean(), nullable=True), + sa.Column('confirmed_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email') + ) + op.create_table('datasets', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.Column('publication', sa.String(), nullable=True), + sa.Column('doi', sa.String(), nullable=True), + sa.Column('pmid', sa.String(), nullable=True), + sa.Column('public', sa.Boolean(), nullable=True), + sa.Column('nimads_data', sa.JSON(), nullable=True), + 
sa.Column('user_id', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('oauth', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('token', sqlalchemy_utils.types.json.JSONType(), nullable=False), + sa.Column('user_id', sa.Text(), nullable=True), + sa.Column('provider_user_id', sa.Text(), nullable=False), + sa.Column('provider', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('provider_user_id') + ) + op.create_table('roles_users', + sa.Column('user_id', sa.Text(), nullable=True), + sa.Column('role_id', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ) + ) + op.create_table('studies', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.Column('publication', sa.String(), nullable=True), + sa.Column('doi', sa.String(), nullable=True), + sa.Column('pmid', sa.String(), nullable=True), + sa.Column('public', sa.Boolean(), nullable=True), + sa.Column('metadata_', sa.JSON(), nullable=True), + sa.Column('user_id', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('analyses', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('study_id', sa.Text(), nullable=True), + sa.Column('name', sa.String(), nullable=True), + 
sa.Column('description', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['study_id'], ['studies.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('entities', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('study_id', sa.Text(), nullable=True), + sa.Column('label', sa.String(), nullable=True), + sa.Column('level', sa.String(), nullable=True), + sa.Column('data', sa.JSON(), nullable=True), + sa.ForeignKeyConstraint(['study_id'], ['studies.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('analysis_conditions', + sa.Column('weight', sa.Float(), nullable=True), + sa.Column('analysis_id', sa.Text(), nullable=False), + sa.Column('condition_id', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['analysis_id'], ['analyses.id'], ), + sa.ForeignKeyConstraint(['condition_id'], ['conditions.id'], ), + sa.PrimaryKeyConstraint('analysis_id', 'condition_id') + ) + op.create_table('images', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('url', sa.String(), nullable=True), + sa.Column('filename', sa.String(), nullable=True), + sa.Column('space', sa.String(), nullable=True), + sa.Column('value_type', sa.String(), nullable=True), + sa.Column('analysis_id', sa.Text(), nullable=True), + sa.Column('data', sa.JSON(), nullable=True), + sa.Column('add_date', sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['analysis_id'], ['analyses.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('points', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + 
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('x', sa.Float(), nullable=True), + sa.Column('y', sa.Float(), nullable=True), + sa.Column('z', sa.Float(), nullable=True), + sa.Column('space', sa.String(), nullable=True), + sa.Column('kind', sa.String(), nullable=True), + sa.Column('image', sa.String(), nullable=True), + sa.Column('label_id', sa.Float(), nullable=True), + sa.Column('analysis_id', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['analysis_id'], ['analyses.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('image_entities', + sa.Column('image', sa.Text(), nullable=True), + sa.Column('entity', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['entity'], ['entities.id'], ), + sa.ForeignKeyConstraint(['image'], ['images.id'], ) + ) + op.create_table('point_entities', + sa.Column('point', sa.Text(), nullable=True), + sa.Column('entity', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['entity'], ['entities.id'], ), + sa.ForeignKeyConstraint(['point'], ['points.id'], ) + ) + op.create_table('point_values', + sa.Column('id', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.Column('point_id', sa.Text(), nullable=True), + sa.Column('kind', sa.String(), nullable=True), + sa.Column('value', sa.String(), nullable=True), + sa.Column('dtype', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['point_id'], ['points.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('point_values') + op.drop_table('point_entities') + op.drop_table('image_entities') + op.drop_table('points') + op.drop_table('images') + op.drop_table('analysis_conditions') + op.drop_table('entities') + op.drop_table('analyses') + op.drop_table('studies') + op.drop_table('roles_users') + op.drop_table('oauth') + op.drop_table('datasets') + op.drop_table('users') + op.drop_table('roles') + op.drop_table('conditions') + # ### end Alembic commands ### From f5d56e45322649467ee0193982d17e0b6f0db32b Mon Sep 17 00:00:00 2001 From: delavega4 Date: Thu, 27 Aug 2020 21:00:52 -0500 Subject: [PATCH 09/17] update .zenodo --- .zenodo.json | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 7408ddbec..8ecf64218 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,10 +1,5 @@ { "creators": [ - { - "affiliation": "University of Texas at Austin", - "name": "De La Vega, Alejandro", - "orcid": "0000-0001-9062-3778" - }, { "affiliation": "University of Texas at Austin", "name": "Yarkoni, Tal", @@ -18,10 +13,8 @@ ], "keywords": [ "neuroimaging", - "naturalstic", - "BIDS", + "meta-analysis", "fMRI" ], - "license": "bsd-3-clause", "upload_type": "software" } From 8b568d2b814546c723dde75fa21d390da6c5d890 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 14:29:15 -0500 Subject: [PATCH 10/17] Add ingest directory and link manage scripts to ingest --- README.md | 17 +++++++++------ scripts/ingest_data.py => ingest/__init__.py | 23 +++++--------------- manage.py | 14 ++++++++++-- scripts/__init__.py | 0 4 files changed, 27 insertions(+), 27 deletions(-) rename scripts/ingest_data.py => ingest/__init__.py (87%) delete mode 100644 scripts/__init__.py diff --git a/README.md b/README.md index 5c3f134eb..1ef84bfc5 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,14 @@ Requirements: Docker and docker-compose. ## Configuration First, set up the main environment variables in `.env` (see: `.env.example`). 
-Next, set up the Flask server's environment variables .... + cp .env.example .env -Finally, set up the frontend's env variables by .... +Next, set up the Flask server's environment variables: + + cp neurostuff/example_config.py neurostuff/config.py + + +Edit both of these template files to set the correct variables ## Initalizing backend Build the containers and start services using the development configuration: @@ -26,12 +31,12 @@ Next, initialize, migrate and upgrade the database migrations. python manage.py db init python manage.py db migrate python manage.py db upgrade - python manage.py add_user useremail password -## Setting up front end +Finally, add an admin user, and ingest data + python manage.py add_user admin@neurostuff.org password + python manage.py ingest_neurosynth -## Ingesting ## Maintaining docker image and db If you make a change to /neurostuff, you should be able to simply restart the server. @@ -42,5 +47,3 @@ If you need to upgrade the db after changing any models: docker-compose exec neurostuff python manage.py db migrate docker-compose exec neurostuff python manage.py db upgrade - -To inspect the database using psql: diff --git a/scripts/ingest_data.py b/ingest/__init__.py similarity index 87% rename from scripts/ingest_data.py rename to ingest/__init__.py index 513b7635e..8bdfdb3f2 100644 --- a/scripts/ingest_data.py +++ b/ingest/__init__.py @@ -4,22 +4,14 @@ import re import os.path as op from pathlib import Path -from datetime import datetime from dateutil.parser import parse as parse_date import tarfile -import tempfile import pandas as pd import requests -from neurostuff.models import Study, Analysis, Condition, Image, User, Point -from neurostuff.core import db, user_datastore - -def reset_database(): - db.drop_all() - db.create_all() - user_datastore.create_user(email='admin@neurostuff.org', password='password') - db.session.commit() +from neurostuff.models import Study, Analysis, Image, User, Point +from neurostuff.core import db 
@manager.command
def ingest_neurosynth(max_rows=None):
    """Ingest the Neurosynth dataset into the database.

    Parameters
    ----------
    max_rows : int, optional
        Cap on the number of coordinate rows to ingest; None ingests all.
    """
    ingest.ingest_neurosynth(max_rows=max_rows)


@manager.command
def ingest_neurovault(verbose=False, limit=20):
    """Ingest NeuroVault collections into the database.

    Parameters
    ----------
    verbose : bool
        Print progress while ingesting.
    limit : int
        Maximum number of collections to ingest.

    BUG FIX: the original body delegated to ``ingest.ingest_neurosynth``
    (copy-paste error) — the same correction patch 17 of this series makes.
    """
    ingest.ingest_neurovault(verbose=verbose, limit=limit)
2020 14:36:13 -0500 Subject: [PATCH 11/17] fix minor API bug --- neurostuff/resources/resources.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/neurostuff/resources/resources.py b/neurostuff/resources/resources.py index f9317cce9..f26ddba01 100644 --- a/neurostuff/resources/resources.py +++ b/neurostuff/resources/resources.py @@ -104,8 +104,7 @@ class ListResource(BaseResource): _multi_search = None def get(self): - - m = self._model # for brevity + m = self._model # for brevity q = m.query # Search @@ -130,8 +129,11 @@ def get(self): type=int) desc = {0: 'asc', 1: 'desc'}[desc] + attr = getattr(m, col) + # Case-insensitive sorting - attr = func.lower(getattr(m, col)) + if col != 'created_at': + attr = func.lower(attr) # TODO: if the sort field is proxied, bad stuff happens. In theory # the next two lines should address this by joining the proxied model, From f2b4bac1dd577736f5cf81406f66215a3b635d40 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 15:21:15 -0500 Subject: [PATCH 12/17] Clean up --- neurostuff/resources/__init__.py | 8 +++++--- neurostuff/resources/resources.py | 2 -- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/neurostuff/resources/__init__.py b/neurostuff/resources/__init__.py index 5230fefd4..757362d78 100644 --- a/neurostuff/resources/__init__.py +++ b/neurostuff/resources/__init__.py @@ -1,7 +1,9 @@ -import wrapt -from flask_restful import Resource, Api +from flask_restful import Api -from .resources import * +from .resources import ( + AnalysisResource, ConditionResource, ImageResource, PointResource, + DatasetResource, StudyResource, StudyListResource, AnalysisListResource, + ImageListResource) def bind_resources(app): diff --git a/neurostuff/resources/resources.py b/neurostuff/resources/resources.py index f26ddba01..590310ad0 100644 --- a/neurostuff/resources/resources.py +++ b/neurostuff/resources/resources.py @@ -8,8 +8,6 @@ from sqlalchemy import func from ..core import db -from 
..schemas import (StudySchema, AnalysisSchema, ConditionSchema, - ImageSchema, PointSchema, DatasetSchema) from ..models import (Dataset, Study, Analysis, Condition, Image, Point, PointValue) From 0ca67646c00d0cc92028e0eac0b734d1df7b3b51 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 17:23:05 -0500 Subject: [PATCH 13/17] Remove extra swagger-ui --- docker-compose.dev.yml | 4 ---- docker-compose.yml | 7 ------- nginx/nginx.dev.conf | 11 ----------- nginx/sites-enabled/flask_project | 12 ------------ 4 files changed, 34 deletions(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index a6efbdd6f..50c262f36 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -4,10 +4,6 @@ services: volumes: - ./nginx/nginx.dev.conf:/etc/nginx/nginx.conf:ro - swagger-ui: - environment: - - "API_URL=http://localhost/swagger/" - neurostuff: command: /usr/local/bin/gunicorn -w 2 -b :8000 neurostuff.core:app --log-level debug --timeout 120 --reload restart: "no" diff --git a/docker-compose.yml b/docker-compose.yml index 53332765f..6468a0ce0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -39,13 +39,6 @@ services: env_file: - .env - swagger-ui: - image: swaggerapi/swagger-ui - environment: - - "API_URL=http://localhost/swagger/" - expose: - - '8080' - volumes: certs: certs-data: diff --git a/nginx/nginx.dev.conf b/nginx/nginx.dev.conf index 52a75dff5..97018122e 100644 --- a/nginx/nginx.dev.conf +++ b/nginx/nginx.dev.conf @@ -54,17 +54,6 @@ http { location /static/ { } - location = /api/ { - return 301 /api/swagger/; - } - - location /api/swagger/ { - proxy_pass http://swagger-ui:8080/; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - } - location / { proxy_pass http://neurostuff:8000; proxy_set_header Host $host; diff --git a/nginx/sites-enabled/flask_project b/nginx/sites-enabled/flask_project index d33723728..6ebcdf1bb 100644 --- 
a/nginx/sites-enabled/flask_project +++ b/nginx/sites-enabled/flask_project @@ -44,18 +44,6 @@ server { location /static/ { } - location = /api/ { - return 301 https://$host/api/swagger/; - } - - location /api/swagger/ { - proxy_pass http://swagger-ui:8080/; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - } - - location / { proxy_pass http://neurostuff:8000; proxy_set_header Host $host; From d5026200626b1ec7f3383fbc0996ff92c0ad0820 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 17:27:45 -0500 Subject: [PATCH 14/17] Fix clean up --- {ingest => neurostuff/ingest}/__init__.py | 0 neurostuff/resources/__init__.py | 5 +- neurostuff/resources/resources.py | 3 +- neurostuff/tests/__init__.py | 0 neurostuff/tests/api/__init__.py | 0 neurostuff/tests/api/test_dataset.py | 28 ++++++ neurostuff/tests/api/test_user.py | 99 ++++++++++++++++++++ neurostuff/tests/conftest.py | 109 ++++++++++++++++++++++ neurostuff/tests/request_utils.py | 54 +++++++++++ neurostuff/tests/test_models.py | 9 ++ neurostuff/tests/test_views.py | 8 ++ 11 files changed, 310 insertions(+), 5 deletions(-) rename {ingest => neurostuff/ingest}/__init__.py (100%) create mode 100644 neurostuff/tests/__init__.py create mode 100644 neurostuff/tests/api/__init__.py create mode 100644 neurostuff/tests/api/test_dataset.py create mode 100644 neurostuff/tests/api/test_user.py create mode 100644 neurostuff/tests/conftest.py create mode 100644 neurostuff/tests/request_utils.py create mode 100644 neurostuff/tests/test_models.py create mode 100644 neurostuff/tests/test_views.py diff --git a/ingest/__init__.py b/neurostuff/ingest/__init__.py similarity index 100% rename from ingest/__init__.py rename to neurostuff/ingest/__init__.py diff --git a/neurostuff/resources/__init__.py b/neurostuff/resources/__init__.py index 757362d78..71ac1d6aa 100644 --- a/neurostuff/resources/__init__.py +++ 
b/neurostuff/resources/__init__.py @@ -1,9 +1,6 @@ from flask_restful import Api -from .resources import ( - AnalysisResource, ConditionResource, ImageResource, PointResource, - DatasetResource, StudyResource, StudyListResource, AnalysisListResource, - ImageListResource) +from .resources import * def bind_resources(app): diff --git a/neurostuff/resources/resources.py b/neurostuff/resources/resources.py index 590310ad0..304ee5257 100644 --- a/neurostuff/resources/resources.py +++ b/neurostuff/resources/resources.py @@ -10,7 +10,8 @@ from ..core import db from ..models import (Dataset, Study, Analysis, Condition, Image, Point, PointValue) - +from ..schemas import (StudySchema, AnalysisSchema, ConditionSchema, + ImageSchema, PointSchema, DatasetSchema) __all__ = [ 'DatasetResource', diff --git a/neurostuff/tests/__init__.py b/neurostuff/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neurostuff/tests/api/__init__.py b/neurostuff/tests/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neurostuff/tests/api/test_dataset.py b/neurostuff/tests/api/test_dataset.py new file mode 100644 index 000000000..fcd8a4f30 --- /dev/null +++ b/neurostuff/tests/api/test_dataset.py @@ -0,0 +1,28 @@ +from ..request_utils import decode_json + + +def test_get_dataset(auth_client, add_local_task_json): + # List of datasets + resp = auth_client.get('/api/datasets') + assert resp.status_code == 200 + dataset_list = decode_json(resp) + assert type(dataset_list) == list + + # Get first dataset + assert 'tasks' in dataset_list[0] + first_dataset_id = dataset_list[0]['id'] + + # Get first dataset by external id + resp = auth_client.get('/api/datasets/{}'.format(first_dataset_id)) + assert resp.status_code == 200 + dataset = decode_json(resp) + assert first_dataset_id == dataset['id'] + assert dataset['tasks'][0]['name'] == 'bidstest' + assert dataset['tasks'][0]['summary'] == 'AV Movie' + assert dataset['name'] == 'bids_test' + assert 
import datetime
from flask_security.confirmable import confirm_user
from ...models.auth import User
from ..request_utils import decode_json


def test_auth(auth_client):
    """Invalid credentials and missing tokens are rejected with 401."""
    # Get auth token with invalid credentials
    auth_resp = auth_client.post(
        '/api/auth',
        data={'username': 'not', 'password': 'existing'},
        headers=None)
    assert auth_resp.status_code == 401

    # Test without auth token
    auth_client.token = None

    resp = auth_client.get('/api/{}'.format('user'))
    assert resp.status_code == 401
    assert decode_json(resp)['description'] == \
        'Request does not contain an access token'


def test_get(auth_client):
    """GET /api/user returns the profile and refreshes activity fields."""
    time = datetime.datetime.now()
    resp = auth_client.get('/api/user')
    assert resp.status_code == 200
    assert 'email' in decode_json(resp)

    user = User.query.filter_by(email=decode_json(resp)['email']).one()
    # Activity tracking is updated as a side effect of the request.
    assert user.last_activity_at > time
    assert user.last_activity_ip is not None


def test_put(auth_client):
    """PUT /api/user updates the profile (full and partial payloads)."""
    # Change the user's name
    values = decode_json(auth_client.get('/api/user'))
    values['name'] = 'new_name'
    resp = auth_client.put('/api/user', data=values)
    assert resp.status_code == 200

    new_values = decode_json(auth_client.get('/api/user'))
    # BUG FIX: the original used `new_values['name'] = 'new_name'`
    # (an assignment), so the rename was never actually verified.
    assert new_values['name'] == 'new_name'

    # An incomplete (partial) put request should also succeed
    resp = auth_client.put('/api/user', data={'name': 'new_name'})
    assert resp.status_code == 200


def test_create_new(auth_client, session):
    """A new account requires confirmation before it can authenticate."""
    # Make a new user and authorize
    resp = auth_client.post(
        '/api/user',
        data={
            'name': 'me', 'email': 'fake@gmail.com', 'password': 'something'})

    auth_client.authorize(email="fake@gmail.com", password="something")
    # Accessing a route without confirming should fail
    resp = auth_client.get('/api/user')
    assert resp.status_code == 401

    # Confirm the new user manually
    user = User.query.filter_by(email="fake@gmail.com").one()
    confirm_user(user)
    session.commit()

    # Now the route should work
    resp = auth_client.get('/api/user')
    assert resp.status_code == 200
    assert decode_json(resp)['email'] == 'fake@gmail.com'


def test_post(auth_client):
    """POST /api/user validates required fields and email format."""
    # Incomplete post is rejected
    resp = auth_client.post('/api/user', data={'name': 'me'})
    assert resp.status_code == 422

    # Invalid email is rejected
    resp = auth_client.post(
        '/api/user',
        data={'name': 'me', 'email': 'fake'})
    assert resp.status_code == 422
    # assert 'Not a valid' in decode_json(resp)['errors']['email'][0]

    # Valid payload is accepted
    resp = auth_client.post(
        '/api/user',
        data={
            'name': 'me', 'email': 'fake@gmail.com', 'password': 'something'})
    assert resp.status_code == 200


def test_get_analysis_list(auth_client):
    """A fresh user has no analyses."""
    resp = auth_client.get('/api/user')

    user = User.query.filter_by(email=decode_json(resp)['email']).one()

    resp = auth_client.get(f'/api/user/{user.id}/analyses')
    assert len(decode_json(resp)) == 0
from ..models import User, Role

"""
Session / db management tools
"""


@pytest.fixture(scope='session')
def app():
    """Session-wide test `Flask` application."""
    # NOTE(review): `environ` requires `from os import environ` at module
    # top; the visible imports here don't provide it (a later patch in
    # this series adds it) — confirm it is present.
    if 'APP_SETTINGS' not in environ:
        _app.config.from_object('config.app.TestingConfig')

    # Establish an application context before running the tests.
    ctx = _app.app_context()
    ctx.push()

    yield _app

    ctx.pop()


@pytest.fixture(scope='session')
def db(app):
    """Session-wide test database, created once and dropped at teardown."""
    _db.init_app(app)
    _db.create_all()

    yield _db

    _db.session.remove()
    _db.drop_all()


@pytest.fixture(scope='function')
def session(db):
    """Create a per-test db session inside a SAVEPOINT.

    All changes made during the test are rolled back afterwards.
    """
    connection = db.engine.connect()
    transaction = connection.begin()

    options = dict(bind=connection, binds={})
    session = db.create_scoped_session(options=options)

    session.begin_nested()

    # session is actually a scoped_session; for the
    # `after_transaction_end` event we need a session instance to
    # listen for, hence the `session()` call
    @sa.event.listens_for(session(), 'after_transaction_end')
    def restart_savepoint(sess, trans):  # renamed: was typo'd `resetart_savepoint`
        # Re-open the SAVEPOINT whenever the test's nested transaction ends,
        # so in-test commits don't escape the outer rollback.
        if trans.nested and not trans._parent.nested:
            session.expire_all()
            session.begin_nested()

    db.session = session

    yield session

    session.remove()
    transaction.rollback()
    connection.close()


@pytest.fixture(scope="function")
def auth_client(add_users):
    """Return an authorized client wrapper for the first test user."""
    from .request_utils import Client

    # BUG FIX: `add_users` yields a flat (id, email, password) tuple;
    # the original `_, ((email, password), _) = add_users` raised
    # ValueError at fixture setup.
    _, email, password = add_users
    client = Client(email=email, password=password)
    return client


"""
Data population fixtures
"""


@pytest.fixture(scope="function")
def add_users(app, db, session):
    """Add a confirmed test user to the db; yields (id, email, password)."""
    from flask_security import SQLAlchemyUserDatastore

    user_datastore = SQLAlchemyUserDatastore(db, User, Role)

    user1 = 'test1@gmail.com'
    pass1 = 'test1'

    user_datastore.create_user(email=user1, password=encrypt_password(pass1),
                               user_name='testuser',
                               confirmed_at=datetime.datetime.now())
    session.commit()
    id_1 = user_datastore.find_user(email=user1).id

    yield id_1, user1, pass1
import json
from functools import partialmethod


class Client(object):
    """JSON-speaking wrapper around an HTTP test client.

    When *test_client* is None, the application's own Flask test client is
    created lazily; otherwise the supplied requests-style client is used.
    Exposes ``get``/``post``/``put``/``delete`` with a uniform signature.
    """

    def __init__(self, test_client=None, prepend='', email=None,
                 password=None):
        # No client supplied: fall back to the app's Flask test client
        # (imported lazily so external clients avoid the dependency).
        self.client_flask = test_client is None
        if self.client_flask:
            from ..core import app
            test_client = app.test_client()

        self.client = test_client
        self.prepend = prepend

        # Authorize immediately when full credentials are given.
        if email is not None and password is not None:
            self.email = email
            self.password = password
            self.authorize(email, password)

    def _make_request(self, request, route, params=None, data=None,
                      headers=None, content_type=None, json_dump=True):
        """Dispatch *request* ('get', 'post', ...) to the wrapped client."""
        handler = getattr(self.client, request)

        if content_type is None:
            content_type = 'application/json'

        full_route = self.prepend + route

        if not self.client_flask:
            # requests-style clients accept a ``json=`` payload directly.
            return handler(
                full_route, json=data, headers=headers, params=params)

        # Flask test clients expect a serialized body and a query string.
        body = data
        if body is not None and json_dump is True:
            body = json.dumps(body)

        return handler(
            full_route, data=body, headers=headers,
            content_type=content_type, query_string=params)

    def authorize(self, email=None, password=None):
        """Hook for acquiring auth tokens; a no-op in this base client."""
        pass

    get = partialmethod(_make_request, 'get')
    post = partialmethod(_make_request, 'post')
    put = partialmethod(_make_request, 'put')
    delete = partialmethod(_make_request, 'delete')


def decode_json(rv):
    """Decode a response object's body as JSON."""
    return json.loads(rv.data.decode())
--git a/neurostuff/tests/test_models.py b/neurostuff/tests/test_models.py new file mode 100644 index 000000000..ed83f361c --- /dev/null +++ b/neurostuff/tests/test_models.py @@ -0,0 +1,9 @@ +import pytest +from ..models import ( + Study, Analysis, Condition, Entity, Point, PointValue, Image, + Dataset) + + +def test_ns_ingestion(session, ingest_neurosynth): + + assert 0 diff --git a/neurostuff/tests/test_views.py b/neurostuff/tests/test_views.py new file mode 100644 index 000000000..5128a6b2a --- /dev/null +++ b/neurostuff/tests/test_views.py @@ -0,0 +1,8 @@ +''' +Test basic views +''' + + +def test_login(auth_client): + rv = auth_client.get('/login/') + assert rv.status_code == 200 From be71964ecd8284c02397c464d58e0438ade77ce0 Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 17:38:40 -0500 Subject: [PATCH 15/17] Remove swagger-ui from config for now --- neurostuff/example_config.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/neurostuff/example_config.py b/neurostuff/example_config.py index 9443529b3..449afe109 100644 --- a/neurostuff/example_config.py +++ b/neurostuff/example_config.py @@ -17,9 +17,6 @@ class Config(object): f"{POSTGRES_PASSWORD}@postgres:5432/{DB_NAME}" PROPAGATE_EXCEPTIONS = True - APISPEC_SWAGGER_URL = '/api/swagger.json' - APISPEC_SWAGGER_UI_URL = '/api/' - GITHUB_CLIENT_ID = "github-id" GITHUB_CLIENT_SECRET = "github-secret" DANCE_SECRET_KEY = "temporary" From 24c98eaaf0bdb4f4d465db6ff79c5f7a4ab92ede Mon Sep 17 00:00:00 2001 From: delavega4 Date: Fri, 28 Aug 2020 18:23:20 -0500 Subject: [PATCH 16/17] Add test shell --- neurostuff/tests/api/test_dataset.py | 21 +------ neurostuff/tests/api/test_user.py | 85 ---------------------------- neurostuff/tests/conftest.py | 1 + neurostuff/tests/test_models.py | 2 +- 4 files changed, 3 insertions(+), 106 deletions(-) diff --git a/neurostuff/tests/api/test_dataset.py b/neurostuff/tests/api/test_dataset.py index fcd8a4f30..e9b31066e 100644 --- a/neurostuff/tests/api/test_dataset.py 
from ..request_utils import decode_json


def test_get_dataset(auth_client, ingest_neurosynth):
    """The dataset list endpoint returns HTTP 200 and a JSON array."""
    response = auth_client.get('/api/datasets')
    assert response.status_code == 200

    payload = decode_json(response)
    assert type(payload) == list
-30,70 +12,3 @@ def test_get(auth_client): user = User.query.filter_by(email=decode_json(resp)['email']).one() assert user.last_activity_at > time assert user.last_activity_ip is not None - - -def test_put(auth_client): - # Testing changing name - values = decode_json(auth_client.get('/api/user')) - values['name'] = 'new_name' - resp = auth_client.put('/api/user', data=values) - - assert resp.status_code == 200 - new_values = decode_json(auth_client.get('/api/user')) - new_values['name'] = 'new_name' - - # Testing incomplete put request - resp = auth_client.put('/api/user', data={'name': 'new_name'}) - assert resp.status_code == 200 - - -def test_create_new(auth_client, session): - # Make a new user and authorize - resp = auth_client.post( - '/api/user', - data={ - 'name': 'me', 'email': 'fake@gmail.com', 'password': 'something'}) - - auth_client.authorize(email="fake@gmail.com", password="something") - # Try getting route without confirming, should fail - resp = auth_client.get('/api/user') - assert resp.status_code == 401 - # Confirm new user manually - - user = User.query.filter_by(email="fake@gmail.com").one() - confirm_user(user) - session.commit() - - # Now should work - resp = auth_client.get('/api/user') - assert resp.status_code == 200 - assert decode_json(resp)['email'] == 'fake@gmail.com' - - -def test_post(auth_client): - # Make incomplete post - resp = auth_client.post('/api/user', data={'name': 'me'}) - assert resp.status_code == 422 - - # Invalid email - resp = auth_client.post( - '/api/user', - data={'name': 'me', 'email': 'fake'}) - assert resp.status_code == 422 - # assert 'Not a valid' in decode_json(resp)['errors']['email'][0] - - # Valid email - resp = auth_client.post( - '/api/user', - data={ - 'name': 'me', 'email': 'fake@gmail.com', 'password': 'something'}) - assert resp.status_code == 200 - - -def test_get_analysis_list(auth_client): - resp = auth_client.get('/api/user') - - user = 
User.query.filter_by(email=decode_json(resp)['email']).one() - - resp = auth_client.get(f'/api/user/{user.id}/analyses') - assert len(decode_json(resp)) == 0 diff --git a/neurostuff/tests/conftest.py b/neurostuff/tests/conftest.py index e06449a55..c1bb05f36 100644 --- a/neurostuff/tests/conftest.py +++ b/neurostuff/tests/conftest.py @@ -1,4 +1,5 @@ import pytest +from os import environ from flask_security.utils import encrypt_password from ..core import app as _app from ..database import db as _db diff --git a/neurostuff/tests/test_models.py b/neurostuff/tests/test_models.py index ed83f361c..e0419c8d8 100644 --- a/neurostuff/tests/test_models.py +++ b/neurostuff/tests/test_models.py @@ -6,4 +6,4 @@ def test_ns_ingestion(session, ingest_neurosynth): - assert 0 + assert 1 From 6891a348af946cc5308112c0b0072eec26312cac Mon Sep 17 00:00:00 2001 From: Alejandro de la Vega Date: Mon, 31 Aug 2020 17:04:34 -0500 Subject: [PATCH 17/17] Update manage.py Co-authored-by: Tal Yarkoni --- manage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manage.py b/manage.py index 995c461ab..ff12d558b 100644 --- a/manage.py +++ b/manage.py @@ -43,7 +43,7 @@ def ingest_neurosynth(max_rows=None): @manager.command def ingest_neurovault(verbose=False, limit=20): - ingest.ingest_neurosynth(verbose=verbose, limit=limit) + ingest.ingest_neurovault(verbose=verbose, limit=limit) if __name__ == '__main__':