diff --git a/.github/config/profile.yaml b/.github/config/profile.yaml
index 009e3ed0ff..478b7d1233 100644
--- a/.github/config/profile.yaml
+++ b/.github/config/profile.yaml
@@ -1,14 +1,14 @@
 ---
-profile: PLACEHOLDER_PROFILE
+profile: test_aiida
 email: aiida@localhost
 first_name: Giuseppe
 last_name: Verdi
 institution: Khedivial
-db_backend: PLACEHOLDER_BACKEND
+db_backend: psql_dos
 db_engine: postgresql_psycopg2
 db_host: localhost
 db_port: 5432
-db_name: PLACEHOLDER_DATABASE_NAME
+db_name: test_aiida
 db_username: postgres
 db_password: ''
 broker_protocol: amqp
@@ -17,4 +17,4 @@ broker_password: guest
 broker_host: 127.0.0.1
 broker_port: 5672
 broker_virtual_host: ''
-repository: PLACEHOLDER_REPOSITORY
+repository: /tmp/test_repository_test_aiida/
diff --git a/.github/system_tests/test_polish_workchains.sh b/.github/system_tests/test_polish_workchains.sh
index 8ab48f63b2..e35b83e6ce 100755
--- a/.github/system_tests/test_polish_workchains.sh
+++ b/.github/system_tests/test_polish_workchains.sh
@@ -15,10 +15,10 @@ VERDI=$(which verdi)
 
 if [ -n "$EXPRESSIONS" ]; then
     for expression in "${EXPRESSIONS[@]}"; do
-        $VERDI -p test_${AIIDA_TEST_BACKEND} run "${CLI_SCRIPT}" -X $CODE -C -F -d -t $TIMEOUT "$expression"
+        $VERDI -p test_aiida run "${CLI_SCRIPT}" -X $CODE -C -F -d -t $TIMEOUT "$expression"
     done
 else
     for i in $(seq 1 $NUMBER_WORKCHAINS); do
-        $VERDI -p test_${AIIDA_TEST_BACKEND} run "${CLI_SCRIPT}" -X $CODE -C -F -d -t $TIMEOUT
+        $VERDI -p test_aiida run "${CLI_SCRIPT}" -X $CODE -C -F -d -t $TIMEOUT
     done
 fi
diff --git a/.github/system_tests/test_profile_manager.py b/.github/system_tests/test_profile_manager.py
index a64d985ad6..cdfb5ed900 100644
--- a/.github/system_tests/test_profile_manager.py
+++ b/.github/system_tests/test_profile_manager.py
@@ -76,7 +76,7 @@ def test_create_use_destroy_profile2(self):
         with self.assertRaises(TestManagerError):
             self.test_create_aiida_db()
 
-        self.profile_manager.reset_db()
+        self.profile_manager.clear_profile()
 
         with self.assertRaises(Exception):
             load_node(data_pk)
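Editor's note on the hunk above: `TestManager.reset_db()` is renamed to `clear_profile()`. A minimal sketch of the new call in the same spirit as the test, assuming `profile_manager` is the test-manager instance these system tests already hold (`wipe_and_verify` and `data_pk` are illustrative names, not part of the diff):

```python
from aiida.orm import load_node

def wipe_and_verify(profile_manager, data_pk: int) -> bool:
    """Clear all data from the test profile and check the node is gone."""
    profile_manager.clear_profile()  # replaces the removed reset_db()
    try:
        load_node(data_pk)
    except Exception:  # the test above likewise only asserts that loading fails
        return True
    return False
```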
diff --git a/.github/workflows/benchmark-config.json b/.github/workflows/benchmark-config.json
index cc698e011b..6c41732df8 100644
--- a/.github/workflows/benchmark-config.json
+++ b/.github/workflows/benchmark-config.json
@@ -7,6 +7,10 @@
     "pytest-benchmarks:ubuntu-18.04,sqlalchemy": {
         "header": "Performance Benchmarks (Ubuntu-18.04, SQLAlchemy)",
         "description": "Performance benchmark tests, generated using pytest-benchmark."
+    },
+    "pytest-benchmarks:ubuntu-18.04,psql_dos": {
+        "header": "Performance Benchmarks (Ubuntu-18.04)",
+        "description": "Performance benchmark tests, generated using pytest-benchmark."
     }
 },
 "groups": {
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 7afb6c7b30..82e38e6603 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -20,7 +20,6 @@ jobs:
         os: [ubuntu-18.04]
         postgres: ['12.3']
         rabbitmq: ['3.8.3']
-        backend: ['django', 'sqlalchemy']
 
     runs-on: ${{ matrix.os }}
     timeout-minutes: 60
@@ -29,7 +28,7 @@ services:
       postgres:
         image: "postgres:${{ matrix.postgres }}"
         env:
-          POSTGRES_DB: test_${{ matrix.backend }}
+          POSTGRES_DB: test_aiida
           POSTGRES_PASSWORD: ''
           POSTGRES_HOST_AUTH_METHOD: trust
         options: >-
@@ -79,15 +78,13 @@
         pip freeze
 
     - name: Run benchmarks
-      env:
-        AIIDA_TEST_BACKEND: ${{ matrix.backend }}
       run: pytest --benchmark-only --benchmark-json benchmark.json
 
     - name: Store benchmark result
      uses: aiidateam/github-action-benchmark@v3
       with:
-        benchmark-data-dir-path: "dev/bench/${{ matrix.os }}/${{ matrix.backend }}"
-        name: "pytest-benchmarks:${{ matrix.os }},${{ matrix.backend }}"
+        benchmark-data-dir-path: "dev/bench/${{ matrix.os }}/psql_dos"
+        name: "pytest-benchmarks:${{ matrix.os }},psql_dos"
         metadata: "postgres:${{ matrix.postgres }}, rabbitmq:${{ matrix.rabbitmq }}"
         output-file-path: benchmark.json
         render-json-path: .github/workflows/benchmark-config.json
diff --git a/.github/workflows/ci-code.yml b/.github/workflows/ci-code.yml
index ada25552d8..256d2e1d2c 100644
--- a/.github/workflows/ci-code.yml
+++ b/.github/workflows/ci-code.yml
@@ -44,19 +44,18 @@ jobs:
     needs: [check-requirements]
 
     runs-on: ubuntu-latest
-    timeout-minutes: 30
+    timeout-minutes: 35
 
     strategy:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.10']
-        backend: ['django', 'sqlalchemy']
 
     services:
       postgres:
         image: postgres:10
         env:
-          POSTGRES_DB: test_${{ matrix.backend }}
+          POSTGRES_DB: test_aiida
           POSTGRES_PASSWORD: ''
           POSTGRES_HOST_AUTH_METHOD: trust
         options: >-
@@ -116,14 +115,11 @@ jobs:
         pip freeze
 
     - name: Setup environment
-      env:
-        AIIDA_TEST_BACKEND: ${{ matrix.backend }}
       run: .github/workflows/setup.sh
 
     - name: Run test suite
       env:
-        AIIDA_TEST_BACKEND: ${{ matrix.backend }}
         SQLALCHEMY_WARN_20: 1
       run: .github/workflows/tests.sh
 
@@ -132,8 +128,7 @@
       if: matrix.python-version == 3.8 && github.repository == 'aiidateam/aiida-core'
       uses: codecov/codecov-action@v1
       with:
-        name: aiida-pytests-py3.8-${{ matrix.backend }}
-        flags: ${{ matrix.backend }}
+        name: aiida-pytests-py3.8
         file: ./coverage.xml
         fail_ci_if_error: false  # don't fail job, if coverage upload fails
diff --git a/.github/workflows/rabbitmq.yml b/.github/workflows/rabbitmq.yml
index a3fc311988..091b7872b4 100644
--- a/.github/workflows/rabbitmq.yml
+++ b/.github/workflows/rabbitmq.yml
@@ -23,7 +23,7 @@
       postgres:
         image: postgres:10
         env:
-          POSTGRES_DB: test_django
+          POSTGRES_DB: test_aiida
           POSTGRES_PASSWORD: ''
           POSTGRES_HOST_AUTH_METHOD: trust
         options: >-
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 571d257d65..8eaaac432d 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -61,7 +61,7 @@
       postgres:
         image: postgres:10
         env:
-          POSTGRES_DB: test_django
+          POSTGRES_DB: test_aiida
           POSTGRES_PASSWORD: ''
           POSTGRES_HOST_AUTH_METHOD: trust
         options: >-
diff --git a/.github/workflows/setup.sh b/.github/workflows/setup.sh
index 6b66a0049a..07d9fadb1d 100755
--- a/.github/workflows/setup.sh
+++ b/.github/workflows/setup.sh
@@ -11,10 +11,6 @@ chmod 755 "${HOME}"
 # Replace the placeholders in configuration files with actual values
 CONFIG="${GITHUB_WORKSPACE}/.github/config"
 cp "${CONFIG}/slurm_rsa" "${HOME}/.ssh/slurm_rsa"
-sed -i "s|PLACEHOLDER_BACKEND|${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml"
-sed -i "s|PLACEHOLDER_PROFILE|test_${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml"
-sed -i "s|PLACEHOLDER_DATABASE_NAME|test_${AIIDA_TEST_BACKEND}|" "${CONFIG}/profile.yaml"
-sed -i "s|PLACEHOLDER_REPOSITORY|/tmp/test_repository_test_${AIIDA_TEST_BACKEND}/|" "${CONFIG}/profile.yaml"
 sed -i "s|PLACEHOLDER_WORK_DIR|${GITHUB_WORKSPACE}|" "${CONFIG}/localhost.yaml"
 sed -i "s|PLACEHOLDER_REMOTE_ABS_PATH_DOUBLER|${CONFIG}/doubler.sh|" "${CONFIG}/doubler.yaml"
 sed -i "s|PLACEHOLDER_SSH_KEY|${HOME}/.ssh/slurm_rsa|" "${CONFIG}/slurm-ssh-config.yaml"
@@ -33,5 +29,5 @@
 verdi computer setup --non-interactive --config "${CONFIG}/slurm-ssh.yaml"
 verdi computer configure core.ssh slurm-ssh --non-interactive --config "${CONFIG}/slurm-ssh-config.yaml" -n  # needs slurm container
 verdi computer test slurm-ssh --print-traceback
-verdi profile setdefault test_${AIIDA_TEST_BACKEND}
+verdi profile setdefault test_aiida
 verdi config set runner.poll.interval 0
diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml
index 970523fcb4..cd557ef025 100644
--- a/.github/workflows/test-install.yml
+++ b/.github/workflows/test-install.yml
@@ -177,19 +177,18 @@ jobs:
     needs: [install-with-pip]
 
     runs-on: ubuntu-latest
-    timeout-minutes: 30
+    timeout-minutes: 35
 
     strategy:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10']
-        backend: ['django', 'sqlalchemy']
 
     services:
       postgres:
         image: postgres:10
         env:
-          POSTGRES_DB: test_${{ matrix.backend }}
+          POSTGRES_DB: test_aiida
           POSTGRES_PASSWORD: ''
           POSTGRES_HOST_AUTH_METHOD: trust
         options: >-
@@ -234,14 +233,12 @@
     - run: pip freeze
 
     - name: Setup AiiDA environment
-      env:
-        AIIDA_TEST_BACKEND: ${{ matrix.backend }}
       run: .github/workflows/setup.sh
 
     - name: Run test suite
       env:
-        AIIDA_TEST_BACKEND: ${{ matrix.backend }}
+        SQLALCHEMY_WARN_20: 1
       run: .github/workflows/tests.sh
 
@@ -252,7 +249,6 @@
     # This artifact can be used in the next step to automatically create a pull request
     # updating the requirements (in case they are inconsistent with the pyproject.toml file).
     - uses: actions/upload-artifact@v1
-      if: matrix.backend == 'django'  # The requirements are identical between backends.
       with:
         name: requirements.txt
         path: requirements-py-${{ matrix.python-version }}.txt
diff --git a/.github/workflows/tests.sh b/.github/workflows/tests.sh
index d94431e55b..bb5fd3169a 100755
--- a/.github/workflows/tests.sh
+++ b/.github/workflows/tests.sh
@@ -9,7 +9,7 @@ export PYTHONPATH="${PYTHONPATH}:${SYSTEM_TESTS}:${MODULE_POLISH}"
 
 # daemon tests
 verdi daemon start 4
-verdi -p test_${AIIDA_TEST_BACKEND} run ${SYSTEM_TESTS}/test_daemon.py
+verdi -p test_aiida run ${SYSTEM_TESTS}/test_daemon.py
 bash ${SYSTEM_TESTS}/test_polish_workchains.sh
 verdi daemon stop
 
@@ -21,7 +21,7 @@ python ${SYSTEM_TESTS}/test_plugin_testcase.py  # uses custom unittest test runn
 
 # Until the `${SYSTEM_TESTS}/pytest` tests are moved within `tests` we have to run them separately and pass in the path to the
 # `conftest.py` explicitly, because otherwise it won't be able to find the fixtures it provides
-AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest --cov aiida --verbose tests/conftest.py ${SYSTEM_TESTS}/pytest
+AIIDA_TEST_PROFILE=test_aiida pytest --cov aiida --verbose tests/conftest.py ${SYSTEM_TESTS}/pytest
 
 # main aiida-core tests
-AIIDA_TEST_PROFILE=test_$AIIDA_TEST_BACKEND pytest --cov aiida --verbose tests
+AIIDA_TEST_PROFILE=test_aiida pytest --cov aiida --verbose tests
diff --git a/.molecule/README.md b/.molecule/README.md
index ee85a6edf7..fab3f6d192 100644
--- a/.molecule/README.md
+++ b/.molecule/README.md
@@ -12,7 +12,7 @@ The simplest way to run these tests is to use the `tox` environment provided in
 
 ```console
 $ pip install tox
-$ tox -e molecule-django
+$ tox -e molecule
 ```
 
 **NOTE**: if you wan to run molecule directly, ensure that you set `export MOLECULE_GLOB=.molecule/*/config_local.yml`.
@@ -29,26 +29,19 @@ This runs the `test` scenario (defined in `config_local.yml`) which:
 
 If you wish to setup the container for manual inspection (i.e. only run steps 2 - 4) you can run:
 
 ```console
-$ tox -e molecule-django converge
+$ tox -e molecule converge
 ```
 
 Then you can jump into this container or run the tests (step 5) separately with:
 
 ```console
-$ tox -e molecule-django validate
+$ tox -e molecule validate
 ```
 
 and finally run step 6:
 
 ```console
-$ tox -e molecule-django destroy
-```
-
-You can set up the aiida profile with either django or sqla,
-and even run both in parallel:
-
-```console
-$ tox -e molecule-django,molecule-sqla -p -- test --parallel
+$ tox -e molecule destroy
 ```
 
 ## Additional variables
@@ -56,5 +49,5 @@
 You can specify the number of daemon workers to spawn using the `AIIDA_TEST_WORKERS` environment variable:
 
 ```console
-$ AIIDA_TEST_WORKERS=4 tox -e molecule-django
+$ AIIDA_TEST_WORKERS=4 tox -e molecule
 ```
diff --git a/.molecule/default/config_local.yml b/.molecule/default/config_local.yml
index c9168f35ac..2db8c417f8 100644
--- a/.molecule/default/config_local.yml
+++ b/.molecule/default/config_local.yml
@@ -22,14 +22,14 @@ scenario:
 driver:
   name: docker
 platforms:
-- name: molecule-aiida-${AIIDA_TEST_BACKEND:-django}
+- name: molecule-aiida-${AIIDA_TEST_BACKEND:-psql_dos}
   image: molecule_tests
   context: "../.."
   command: /sbin/my_init
   healthcheck:
     test: wait-for-services
   volumes:
-  - molecule-pip-cache-${AIIDA_TEST_BACKEND:-django}:/home/.cache/pip
+  - molecule-pip-cache-${AIIDA_TEST_BACKEND:-psql_dos}:/home/.cache/pip
   privileged: true
   retries: 3
 # configuration for how to run the playbooks
@@ -63,7 +63,7 @@ provisioner:
     aiida_pip_cache: /home/.cache/pip
     venv_bin: /opt/conda/bin
     ansible_python_interpreter: "{{ venv_bin }}/python"
-    aiida_backend: ${AIIDA_TEST_BACKEND:-django}
+    aiida_backend: ${AIIDA_TEST_BACKEND:-psql_dos}
     aiida_workers: ${AIIDA_TEST_WORKERS:-2}
-    aiida_path: /tmp/.aiida_${AIIDA_TEST_BACKEND:-django}
+    aiida_path: /tmp/.aiida_${AIIDA_TEST_BACKEND:-psql_dos}
     aiida_query_stats: true
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2a428aca3c..001085ffb2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -86,7 +86,6 @@ repos:
             aiida/orm/implementation/nodes.py|
             aiida/orm/implementation/users.py|
             aiida/orm/implementation/sql/backends.py|
-            aiida/orm/implementation/django/backend.py|
             aiida/orm/implementation/sqlalchemy/backend.py|
             aiida/orm/implementation/querybuilder.py|
             aiida/orm/implementation/sqlalchemy/querybuilder/.*py|
diff --git a/Dockerfile b/Dockerfile
index 1c715f7ada..d53529f6eb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,7 +9,7 @@ ENV USER_EMAIL aiida@localhost
 ENV USER_FIRST_NAME Giuseppe
 ENV USER_LAST_NAME Verdi
 ENV USER_INSTITUTION Khedivial
-ENV AIIDADB_BACKEND django
+ENV AIIDADB_BACKEND psql_dos
 
 # Copy and install AiiDA
 COPY . aiida-core
diff --git a/aiida/__init__.py b/aiida/__init__.py
index 391c98b709..fcbc1704f6 100644
--- a/aiida/__init__.py
+++ b/aiida/__init__.py
@@ -21,7 +21,7 @@
 More information at http://www.aiida.net
 """
 from aiida.common.log import configure_logging
-from aiida.manage.configuration import get_config_option, get_profile, load_profile
+from aiida.manage.configuration import get_config_option, get_profile, load_profile, profile_context
 
 __copyright__ = (
     'Copyright (c), This file is part of the AiiDA platform. '
@@ -48,23 +48,21 @@ def get_strict_version():
     return StrictVersion(__version__)
 
 
-def get_version():
+def get_version() -> str:
     """
     Return the current AiiDA distribution version
 
     :returns: the current version
-    :rtype: str
     """
     return __version__
 
 
-def _get_raw_file_header():
+def _get_raw_file_header() -> str:
     """
     Get the default header for source AiiDA source code files.
     Note: is not preceded by comment character.
 
     :return: default AiiDA source file header
-    :rtype: str
     """
     return f"""This file has been created with AiiDA v. {__version__}
If you use AiiDA for publication purposes, please cite:
@@ -72,7 +70,7 @@ def _get_raw_file_header():
 """
 
 
-def get_file_header(comment_char='# '):
+def get_file_header(comment_char: str = '# ') -> str:
     """
     Get the default header for source AiiDA source code files.
 
@@ -81,10 +79,8 @@ def get_file_header(comment_char='# '):
     Prepend by comment character.
 
     :param comment_char: string put in front of each line
-    :type comment_char: str
 
     :return: default AiiDA source file header
-    :rtype: str
     """
     lines = _get_raw_file_header().splitlines()
     return '\n'.join(f'{comment_char}{line}' for line in lines)
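Editor's note on the `aiida/__init__.py` hunk above: it re-exports `profile_context` from `aiida.manage.configuration` at the package top level. A minimal usage sketch, assuming the context manager accepts a profile name the way `load_profile` does (`test_aiida` is the profile configured throughout this diff):

```python
from aiida import profile_context
from aiida.orm import Node, QueryBuilder

# Load the profile only for the duration of the block; it is unloaded on exit,
# which avoids leaking state between profiles in a single interpreter.
with profile_context('test_aiida'):
    print(QueryBuilder().append(Node).count())
```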
diff --git a/aiida/backends/__init__.py b/aiida/backends/__init__.py
index e138a9beb7..c792ca6718 100644
--- a/aiida/backends/__init__.py
+++ b/aiida/backends/__init__.py
@@ -23,23 +23,3 @@
 # yapf: enable
 
 # END AUTO-GENERATED
-
-BACKEND_DJANGO = 'django'
-BACKEND_SQLA = 'sqlalchemy'
-
-
-def get_backend_manager(backend):
-    """Get an instance of the `BackendManager` for the current backend.
-
-    :param backend: the type of the database backend
-    :return: `BackendManager`
-    """
-    if backend == BACKEND_DJANGO:
-        from aiida.backends.djsite.manager import DjangoBackendManager
-        return DjangoBackendManager()
-
-    if backend == BACKEND_SQLA:
-        from aiida.backends.sqlalchemy.manager import SqlaBackendManager
-        return SqlaBackendManager()
-
-    raise Exception(f'unknown backend type `{backend}`')
diff --git a/aiida/backends/control.py b/aiida/backends/control.py
index 73496e80b1..044424e5f6 100644
--- a/aiida/backends/control.py
+++ b/aiida/backends/control.py
@@ -15,7 +15,7 @@
 from typing import TYPE_CHECKING, Optional, Set
 
 from aiida.common.log import AIIDA_LOGGER
-from aiida.manage.manager import get_manager
+from aiida.manage import get_manager
 
 if TYPE_CHECKING:
     from aiida.orm.implementation import Backend
@@ -44,7 +44,7 @@ def repository_maintain(
     """
 
     if backend is None:
-        backend = get_manager().get_backend()
+        backend = get_manager().get_profile_storage()
 
     repository = backend.get_repository()
 
     unreferenced_objects = get_unreferenced_keyset(aiida_backend=backend)
@@ -75,7 +75,7 @@ def get_unreferenced_keyset(check_consistency: bool = True, aiida_backend: Optio
     MAINTAIN_LOGGER.info('Obtaining unreferenced object keys ...')
 
     if aiida_backend is None:
-        aiida_backend = get_manager().get_backend()
+        aiida_backend = get_manager().get_profile_storage()
 
     repository = aiida_backend.get_repository()
 
@@ -95,6 +95,6 @@ def get_unreferenced_keyset(check_consistency: bool = True, aiida_backend: Optio
 def get_repository_info(statistics: bool = False, backend: Optional['Backend'] = None) -> dict:
     """Returns general information on the repository."""
     if backend is None:
-        backend = get_manager().get_backend()
+        backend = get_manager().get_profile_storage()
     repository = backend.get_repository()
     return repository.get_info(statistics)
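Editor's note on the `aiida/backends/control.py` hunks above: the recurring substitution replaces `get_manager().get_backend()` with `get_manager().get_profile_storage()`. A sketch built only from the calls visible in this diff (`get_repository`, `get_info`):

```python
from aiida.manage import get_manager

# The manager now hands out the storage backend of the loaded profile ...
storage = get_manager().get_profile_storage()

# ... which still exposes the file repository, as used by repository_maintain()
# and get_repository_info() in the hunks above.
repository = storage.get_repository()
print(repository.get_info(statistics=True))
```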
diff --git a/aiida/backends/djsite/__init__.py b/aiida/backends/djsite/__init__.py
deleted file mode 100644
index 94f96ba742..0000000000
--- a/aiida/backends/djsite/__init__.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=global-statement
-"""Module with implementation of the database backend using Django."""
-from aiida.backends.utils import create_scoped_session_factory, create_sqlalchemy_engine
-
-ENGINE = None
-SESSION_FACTORY = None
-
-
-def reset_session():
-    """Reset the session which means setting the global engine and session factory instances to `None`."""
-    global ENGINE
-    global SESSION_FACTORY
-
-    if ENGINE is not None:
-        ENGINE.dispose()
-
-    if SESSION_FACTORY is not None:
-        SESSION_FACTORY.expunge_all()  # pylint: disable=no-member
-        SESSION_FACTORY.close()  # pylint: disable=no-member
-
-    ENGINE = None
-    SESSION_FACTORY = None
-
-
-def get_scoped_session(**kwargs):
-    """Return a scoped session for the given profile that is exclusively to be used for the `QueryBuilder`.
-
-    Since the `QueryBuilder` implementation uses SqlAlchemy to map the query onto the models in order to generate the
-    SQL to be sent to the database, it requires a session, which is an :class:`sqlalchemy.orm.session.Session` instance.
-    The only purpose is for SqlAlchemy to be able to connect to the database perform the query and retrieve the results.
-    Even the Django backend implementation will use SqlAlchemy for its `QueryBuilder` and so also needs an SqlA session.
-    It is important that we do not reuse the scoped session factory in the SqlAlchemy implementation, because that runs
-    the risk of cross-talk once profiles can be switched dynamically in a single python interpreter. Therefore the
-    Django implementation of the `QueryBuilder` should keep its own SqlAlchemy engine and scoped session factory
-    instances that are used to provide the query builder with a session.
-
-    :param kwargs: keyword arguments that will be passed on to :py:func:`aiida.backends.utils.create_sqlalchemy_engine`,
-        opening the possibility to change QueuePool time outs and more.
-        See https://docs.sqlalchemy.org/en/13/core/engines.html?highlight=create_engine#sqlalchemy.create_engine for
-        more info.
-
-    :return: :class:`sqlalchemy.orm.session.Session` instance with engine configured for the given profile.
-    """
-    from aiida.manage.configuration import get_profile
-
-    global ENGINE
-    global SESSION_FACTORY
-
-    if SESSION_FACTORY is not None:
-        session = SESSION_FACTORY()
-        return session
-
-    if ENGINE is None:
-        ENGINE = create_sqlalchemy_engine(get_profile(), **kwargs)
-
-    SESSION_FACTORY = create_scoped_session_factory(ENGINE)
-
-    return SESSION_FACTORY()
diff --git a/aiida/backends/djsite/db/__init__.py b/aiida/backends/djsite/db/__init__.py
deleted file mode 100644
index 2776a55f97..0000000000
--- a/aiida/backends/djsite/db/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
diff --git a/aiida/backends/djsite/db/migrations/0001_initial.py b/aiida/backends/djsite/db/migrations/0001_initial.py
deleted file mode 100644
index ffdf64b185..0000000000
--- a/aiida/backends/djsite/db/migrations/0001_initial.py
+++ /dev/null
@@ -1,519 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-
-REVISION = '1.0.1'
-DOWN_REVISION = '1.0.0'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('auth', '0001_initial'),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='DbUser',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('password', models.CharField(max_length=128, verbose_name='password')),
-                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
-                (
-                    'is_superuser',
-                    models.BooleanField(
-                        default=False,
-                        help_text='Designates that this user has all permissions without explicitly assigning them.',
-                        verbose_name='superuser status'
-                    )
-                ),
-                ('email', models.EmailField(unique=True, max_length=75, db_index=True)),
-                ('first_name', models.CharField(max_length=254, blank=True)),
-                ('last_name', models.CharField(max_length=254, blank=True)),
-                ('institution', models.CharField(max_length=254, blank=True)),
-                (
-                    'is_staff',
-                    models.BooleanField(
-                        default=False, help_text='Designates whether the user can log into this admin site.'
-                    )
-                ),
-                (
-                    'is_active',
-                    models.BooleanField(
-                        default=True,
-                        help_text='Designates whether this user should be treated as active. Unselect this instead of '
-                        'deleting accounts.'
-                    )
-                ),
-                ('date_joined', models.DateTimeField(default=django.utils.timezone.now)),
-                (
-                    'groups',
-                    models.ManyToManyField(
-                        related_query_name='user',
-                        related_name='user_set',
-                        to='auth.Group',
-                        blank=True,
-                        help_text='The groups this user belongs to. A user will get all permissions granted to each of '
-                        'his/her group.',
-                        verbose_name='groups'
-                    )
-                ),
-                (
-                    'user_permissions',
-                    models.ManyToManyField(
-                        related_query_name='user',
-                        related_name='user_set',
-                        to='auth.Permission',
-                        blank=True,
-                        help_text='Specific permissions for this user.',
-                        verbose_name='user permissions'
-                    )
-                ),
-            ],
-            options={
-                'abstract': False,
-            },
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbAttribute',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('key', models.CharField(max_length=1024, db_index=True)),
-                (
-                    'datatype',
-                    models.CharField(
-                        default='none',
-                        max_length=10,
-                        db_index=True,
-                        choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'),
-                                 ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')]
-                    )
-                ),
-                ('tval', models.TextField(default='', blank=True)),
-                ('fval', models.FloatField(default=None, null=True)),
-                ('ival', models.IntegerField(default=None, null=True)),
-                ('bval', models.NullBooleanField(default=None)),
-                ('dval', models.DateTimeField(default=None, null=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbAuthInfo',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('auth_params', models.TextField(default='{}')),
-                ('metadata', models.TextField(default='{}')),
-                ('enabled', models.BooleanField(default=True)),
-                ('aiidauser', models.ForeignKey(to='db.DbUser', on_delete=models.CASCADE)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbCalcState',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                (
-                    'state',
-                    models.CharField(
-                        db_index=True,
-                        max_length=25,
-                        choices=[('UNDETERMINED', 'UNDETERMINED'), ('NOTFOUND', 'NOTFOUND'),
-                                 ('RETRIEVALFAILED', 'RETRIEVALFAILED'), ('COMPUTED', 'COMPUTED'),
-                                 ('RETRIEVING', 'RETRIEVING'), ('WITHSCHEDULER', 'WITHSCHEDULER'),
-                                 ('SUBMISSIONFAILED', 'SUBMISSIONFAILED'), ('PARSING', 'PARSING'), ('FAILED', 'FAILED'),
-                                 ('FINISHED', 'FINISHED'), ('TOSUBMIT', 'TOSUBMIT'), ('SUBMITTING', 'SUBMITTING'),
-                                 ('IMPORTED', 'IMPORTED'), ('NEW', 'NEW'), ('PARSINGFAILED', 'PARSINGFAILED')]
-                    )
-                ),
-                ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbComment',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('uuid', models.CharField(editable=False, blank=True, max_length=36)),
-                ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('mtime', models.DateTimeField(auto_now=True)),
-                ('content', models.TextField(blank=True)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbComputer',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('uuid', models.CharField(max_length=36, editable=False, blank=True)),
-                ('name', models.CharField(unique=True, max_length=255)),
-                ('hostname', models.CharField(max_length=255)),
-                ('description', models.TextField(blank=True)),
-                ('enabled', models.BooleanField(default=True)),
-                ('transport_type', models.CharField(max_length=255)),
-                ('scheduler_type', models.CharField(max_length=255)),
-                ('transport_params', models.TextField(default='{}')),
-                ('metadata', models.TextField(default='{}')),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbExtra',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('key', models.CharField(max_length=1024, db_index=True)),
-                (
-                    'datatype',
-                    models.CharField(
-                        default='none',
-                        max_length=10,
-                        db_index=True,
-                        choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'),
-                                 ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')]
-                    )
-                ),
-                ('tval', models.TextField(default='', blank=True)),
-                ('fval', models.FloatField(default=None, null=True)),
-                ('ival', models.IntegerField(default=None, null=True)),
-                ('bval', models.NullBooleanField(default=None)),
-                ('dval', models.DateTimeField(default=None, null=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbGroup',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('uuid', models.CharField(max_length=36, editable=False, blank=True)),
-                ('name', models.CharField(max_length=255, db_index=True)),
-                ('type', models.CharField(default='', max_length=255, db_index=True)),
-                ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('description', models.TextField(blank=True)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbLink',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('label', models.CharField(max_length=255, db_index=True)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbLock',
-            fields=[
-                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
-                ('creation', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('timeout', models.IntegerField(editable=False)),
-                ('owner', models.CharField(max_length=255)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbLog',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('loggername', models.CharField(max_length=255, db_index=True)),
-                ('levelname', models.CharField(max_length=50, db_index=True)),
-                ('objname', models.CharField(db_index=True, max_length=255, blank=True)),
-                ('objpk', models.IntegerField(null=True, db_index=True)),
-                ('message', models.TextField(blank=True)),
-                ('metadata', models.TextField(default='{}')),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbNode',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('uuid', models.CharField(max_length=36, editable=False, blank=True)),
-                ('type', models.CharField(max_length=255, db_index=True)),
-                ('label', models.CharField(db_index=True, max_length=255, blank=True)),
-                ('description', models.TextField(blank=True)),
-                ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('mtime', models.DateTimeField(auto_now=True)),
-                ('nodeversion', models.IntegerField(default=1, editable=False)),
-                ('public', models.BooleanField(default=False)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbPath',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('depth', models.IntegerField(editable=False)),
-                ('entry_edge_id', models.IntegerField(null=True, editable=False)),
-                ('direct_edge_id', models.IntegerField(null=True, editable=False)),
-                ('exit_edge_id', models.IntegerField(null=True, editable=False)),
-                (
-                    'child',
-                    models.ForeignKey(
-                        related_name='parent_paths', editable=False, to='db.DbNode', on_delete=models.CASCADE
-                    )
-                ),
-                (
-                    'parent',
-                    models.ForeignKey(
-                        related_name='child_paths', editable=False, to='db.DbNode', on_delete=models.CASCADE
-                    )
-                ),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbSetting',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('key', models.CharField(max_length=1024, db_index=True)),
-                (
-                    'datatype',
-                    models.CharField(
-                        default='none',
-                        max_length=10,
-                        db_index=True,
-                        choices=[('float', 'float'), ('int', 'int'), ('txt', 'txt'), ('bool', 'bool'), ('date', 'date'),
-                                 ('json', 'json'), ('dict', 'dict'), ('list', 'list'), ('none', 'none')]
-                    )
-                ),
-                ('tval', models.TextField(default='', blank=True)),
-                ('fval', models.FloatField(default=None, null=True)),
-                ('ival', models.IntegerField(default=None, null=True)),
-                ('bval', models.NullBooleanField(default=None)),
-                ('dval', models.DateTimeField(default=None, null=True)),
-                ('description', models.TextField(blank=True)),
-                ('time', models.DateTimeField(auto_now=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbWorkflow',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('uuid', models.CharField(max_length=36, editable=False, blank=True)),
-                ('ctime', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('mtime', models.DateTimeField(auto_now=True)),
-                ('label', models.CharField(db_index=True, max_length=255, blank=True)),
-                ('description', models.TextField(blank=True)),
-                ('nodeversion', models.IntegerField(default=1, editable=False)),
-                ('lastsyncedversion', models.IntegerField(default=0, editable=False)),
-                (
-                    'state',
-                    models.CharField(
-                        choices=[('CREATED', 'CREATED'), ('ERROR', 'ERROR'), ('FINISHED', 'FINISHED'),
-                                 ('INITIALIZED', 'INITIALIZED'), ('RUNNING', 'RUNNING'), ('SLEEP', 'SLEEP')],
-                        default='INITIALIZED',
-                        max_length=255
-                    )
-                ),
-                ('report', models.TextField(blank=True)),
-                ('module', models.TextField()),
-                ('module_class', models.TextField()),
-                ('script_path', models.TextField()),
-                ('script_md5', models.CharField(max_length=255)),
-                ('user', models.ForeignKey(to='db.DbUser', on_delete=django.db.models.deletion.PROTECT)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbWorkflowData',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=255)),
-                ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('data_type', models.CharField(default='PARAMETER', max_length=255)),
-                ('value_type', models.CharField(default='NONE', max_length=255)),
-                ('json_value', models.TextField(blank=True)),
-                ('aiida_obj', models.ForeignKey(blank=True, to='db.DbNode', null=True, on_delete=models.CASCADE)),
-                ('parent', models.ForeignKey(related_name='data', to='db.DbWorkflow', on_delete=models.CASCADE)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.CreateModel(
-            name='DbWorkflowStep',
-            fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=255)),
-                ('time', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
-                ('nextcall', models.CharField(default='none', max_length=255)),
-                (
-                    'state',
-                    models.CharField(
-                        choices=[('CREATED', 'CREATED'), ('ERROR', 'ERROR'), ('FINISHED', 'FINISHED'),
-                                 ('INITIALIZED', 'INITIALIZED'), ('RUNNING', 'RUNNING'), ('SLEEP', 'SLEEP')],
-                        default='CREATED',
-                        max_length=255
-                    )
-                ),
-                ('calculations', models.ManyToManyField(related_name='workflow_step', to='db.DbNode')),
-                ('parent', models.ForeignKey(related_name='steps', to='db.DbWorkflow', on_delete=models.CASCADE)),
-                ('sub_workflows', models.ManyToManyField(related_name='parent_workflow_step', to='db.DbWorkflow')),
-                ('user', models.ForeignKey(to='db.DbUser', on_delete=django.db.models.deletion.PROTECT)),
-            ],
-            options={},
-            bases=(models.Model,),
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbworkflowstep',
-            unique_together=set([('parent', 'name')]),
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbworkflowdata',
-            unique_together=set([('parent', 'name', 'data_type')]),
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbsetting',
-            unique_together=set([('key',)]),
-        ),
-        migrations.AddField(
-            model_name='dbnode',
-            name='children',
-            field=models.ManyToManyField(related_name='parents', through='db.DbPath', to='db.DbNode'),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbnode',
-            name='dbcomputer',
-            field=models.ForeignKey(
-                related_name='dbnodes', on_delete=django.db.models.deletion.PROTECT, to='db.DbComputer', null=True
-            ),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbnode',
-            name='outputs',
-            field=models.ManyToManyField(related_name='inputs', through='db.DbLink', to='db.DbNode'),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbnode',
-            name='user',
-            field=models.ForeignKey(
-                related_name='dbnodes', on_delete=django.db.models.deletion.PROTECT, to='db.DbUser'
-            ),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dblink',
-            name='input',
-            field=models.ForeignKey(
-                related_name='output_links', on_delete=django.db.models.deletion.PROTECT, to='db.DbNode'
-            ),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dblink',
-            name='output',
-            field=models.ForeignKey(related_name='input_links', to='db.DbNode', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dblink',
-            unique_together=set([('input', 'output'), ('output', 'label')]),
-        ),
-        migrations.AddField(
-            model_name='dbgroup',
-            name='dbnodes',
-            field=models.ManyToManyField(related_name='dbgroups', to='db.DbNode'),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbgroup',
-            name='user',
-            field=models.ForeignKey(related_name='dbgroups', to='db.DbUser', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbgroup',
-            unique_together=set([('name', 'type')]),
-        ),
-        migrations.AddField(
-            model_name='dbextra',
-            name='dbnode',
-            field=models.ForeignKey(related_name='dbextras', to='db.DbNode', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbextra',
-            unique_together=set([('dbnode', 'key')]),
-        ),
-        migrations.AddField(
-            model_name='dbcomment',
-            name='dbnode',
-            field=models.ForeignKey(related_name='dbcomments', to='db.DbNode', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbcomment',
-            name='user',
-            field=models.ForeignKey(to='db.DbUser', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AddField(
-            model_name='dbcalcstate',
-            name='dbnode',
-            field=models.ForeignKey(related_name='dbstates', to='db.DbNode', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbcalcstate',
-            unique_together=set([('dbnode', 'state')]),
-        ),
-        migrations.AddField(
-            model_name='dbauthinfo',
-            name='dbcomputer',
-            field=models.ForeignKey(to='db.DbComputer', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbauthinfo',
-            unique_together=set([('aiidauser', 'dbcomputer')]),
-        ),
-        migrations.AddField(
-            model_name='dbattribute',
-            name='dbnode',
-            field=models.ForeignKey(related_name='dbattributes', to='db.DbNode', on_delete=models.CASCADE),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dbattribute',
-            unique_together=set([('dbnode', 'key')]),
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0002_db_state_change.py b/aiida/backends/djsite/db/migrations/0002_db_state_change.py
deleted file mode 100644
index 7b94910615..0000000000
--- a/aiida/backends/djsite/db/migrations/0002_db_state_change.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations, models
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-
-REVISION = '1.0.2'
-DOWN_REVISION = '1.0.1'
-
-
-def fix_calc_states(apps, _):
-    """Fix calculation states."""
-    from aiida.orm.utils import load_node
-
-    # These states should never exist in the database but we'll play it safe
-    # and deal with them if they do
-    DbCalcState = apps.get_model('db', 'DbCalcState')
-    for calc_state in DbCalcState.objects.filter(state__in=['UNDETERMINED', 'NOTFOUND']):
-        old_state = calc_state.state
-        calc_state.state = 'FAILED'
-        calc_state.save()
-        # Now add a note in the log to say what we've done
-        calc = load_node(pk=calc_state.dbnode.pk)
-        calc.logger.warning(
-            'Job state {} found for calculation {} which should never be in '
-            'the database. Changed state to FAILED.'.format(old_state, calc_state.dbnode.pk)
-        )
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0001_initial'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='dbcalcstate',
-            name='state',
-            # The UNDETERMINED and NOTFOUND 'states' were removed as these
-            # don't make sense
-            field=models.CharField(
-                db_index=True,
-                max_length=25,
-                choices=[('RETRIEVALFAILED', 'RETRIEVALFAILED'), ('COMPUTED', 'COMPUTED'), ('RETRIEVING', 'RETRIEVING'),
-                         ('WITHSCHEDULER', 'WITHSCHEDULER'), ('SUBMISSIONFAILED', 'SUBMISSIONFAILED'),
-                         ('PARSING', 'PARSING'), ('FAILED', 'FAILED'),
-                         ('FINISHED', 'FINISHED'), ('TOSUBMIT', 'TOSUBMIT'), ('SUBMITTING', 'SUBMITTING'),
-                         ('IMPORTED', 'IMPORTED'), ('NEW', 'NEW'), ('PARSINGFAILED', 'PARSINGFAILED')]
-            ),
-            preserve_default=True,
-        ),
-        # Fix up any calculation states that had one of the removed states
-        migrations.RunPython(fix_calc_states),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0003_add_link_type.py b/aiida/backends/djsite/db/migrations/0003_add_link_type.py
deleted file mode 100644
index 45b889b99a..0000000000
--- a/aiida/backends/djsite/db/migrations/0003_add_link_type.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations, models
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-import aiida.common.timezone
-
-REVISION = '1.0.3'
-DOWN_REVISION = '1.0.2'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0002_db_state_change'),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name='dblink',
-            name='type',
-            field=models.CharField(db_index=True, max_length=255, blank=True),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbcalcstate',
-            name='time',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbcomment',
-            name='ctime',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbgroup',
-            name='time',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dblock',
-            name='creation',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dblog',
-            name='time',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbnode',
-            name='ctime',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbuser',
-            name='date_joined',
-            field=models.DateTimeField(default=aiida.common.timezone.now),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbworkflow',
-            name='ctime',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbworkflowdata',
-            name='time',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbworkflowstep',
-            name='time',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False),
-            preserve_default=True,
-        ),
-        migrations.AlterUniqueTogether(
-            name='dblink',
-            unique_together=set([]),
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0004_add_daemon_and_uuid_indices.py b/aiida/backends/djsite/db/migrations/0004_add_daemon_and_uuid_indices.py
deleted file mode 100644
index 7327bc4866..0000000000
--- a/aiida/backends/djsite/db/migrations/0004_add_daemon_and_uuid_indices.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations, models
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-
-REVISION = '1.0.4'
-DOWN_REVISION = '1.0.3'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0003_add_link_type'),
-    ]
-
-    operations = [
-        # Create the index that speeds up the daemon queries
-        # We use the RunSQL command because Django interface
-        # doesn't seem to support partial indexes
-        migrations.RunSQL(
-            """
-            CREATE INDEX tval_idx_for_daemon
-            ON db_dbattribute (tval)
-            WHERE ("db_dbattribute"."tval"
-            IN ('COMPUTED', 'WITHSCHEDULER', 'TOSUBMIT'))"""
-        ),
-
-        # Create an index on UUIDs to speed up loading of nodes
-        # using this field
-        migrations.AlterField(
-            model_name='dbnode',
-            name='uuid',
-            field=models.CharField(max_length=36, db_index=True, editable=False, blank=True),
-            preserve_default=True,
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0005_add_cmtime_indices.py b/aiida/backends/djsite/db/migrations/0005_add_cmtime_indices.py
deleted file mode 100644
index 05ea5d9b5b..0000000000
--- a/aiida/backends/djsite/db/migrations/0005_add_cmtime_indices.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations, models
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-import aiida.common.timezone
-
-REVISION = '1.0.5'
-DOWN_REVISION = '1.0.4'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0004_add_daemon_and_uuid_indices'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='dbnode',
-            name='ctime',
-            field=models.DateTimeField(default=aiida.common.timezone.now, editable=False, db_index=True),
-            preserve_default=True,
-        ),
-        migrations.AlterField(
-            model_name='dbnode',
-            name='mtime',
-            field=models.DateTimeField(auto_now=True, db_index=True),
-            preserve_default=True,
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0006_delete_dbpath.py b/aiida/backends/djsite/db/migrations/0006_delete_dbpath.py
deleted file mode 100644
index b519255639..0000000000
--- a/aiida/backends/djsite/db/migrations/0006_delete_dbpath.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-
-REVISION = '1.0.6'
-DOWN_REVISION = '1.0.5'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0005_add_cmtime_indices'),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name='dbpath',
-            name='child',
-        ),
-        migrations.RemoveField(
-            model_name='dbpath',
-            name='parent',
-        ),
-        migrations.RemoveField(
-            model_name='dbnode',
-            name='children',
-        ),
-        migrations.DeleteModel(name='DbPath',),
-        migrations.RunSQL(
-            """
-            DROP TRIGGER IF EXISTS autoupdate_tc ON db_dblink;
-            DROP FUNCTION IF EXISTS update_tc();
-            """
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0007_update_linktypes.py b/aiida/backends/djsite/db/migrations/0007_update_linktypes.py
deleted file mode 100644
index 3134bb807a..0000000000
--- a/aiida/backends/djsite/db/migrations/0007_update_linktypes.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.8' -DOWN_REVISION = '1.0.7' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0006_delete_dbpath'), - ] - - operations = [ - # I am first migrating the wrongly declared returnlinks out of - # the InlineCalculations. - # This bug is reported #628 https://github.com/aiidateam/aiida-core/issues/628 - # There is an explicit check in the code of the inline calculation - # ensuring that the calculation returns UNSTORED nodes. - # Therefore, no cycle can be created with that migration! - # - # this command: - # 1) selects all links that - # - joins an InlineCalculation (or subclass) as input - # - joins a Data (or subclass) as output - # - is marked as a returnlink. - # 2) set for these links the type to 'createlink' - migrations.RunSQL( - """ - UPDATE db_dblink set type='createlink' WHERE db_dblink.id IN ( - SELECT db_dblink_1.id - FROM db_dbnode AS db_dbnode_1 - JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id - JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id - WHERE db_dbnode_1.type LIKE 'calculation.inline.%' - AND db_dbnode_2.type LIKE 'data.%' - AND db_dblink_1.type = 'returnlink' - ); - """ - ), - # Now I am updating the link-types that are null because of either an export and subsequent import - # https://github.com/aiidateam/aiida-core/issues/685 - # or because the link types don't exist because the links were added before the introduction of link types. - # This is reported here: https://github.com/aiidateam/aiida-core/issues/687 - # - # The following sql statement: - # 1) selects all links that - # - joins Data (or subclass) or Code as input - # - joins Calculation (or subclass) as output: includes WorkCalculation, InlineCalcuation, JobCalculations... - # - has no type (null) - # 2) set for these links the type to 'inputlink' - migrations.RunSQL( - """ - UPDATE db_dblink set type='inputlink' where id in ( - SELECT db_dblink_1.id - FROM db_dbnode AS db_dbnode_1 - JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id - JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id - WHERE ( db_dbnode_1.type LIKE 'data.%' or db_dbnode_1.type = 'code.Code.' ) - AND db_dbnode_2.type LIKE 'calculation.%' - AND ( db_dblink_1.type = null OR db_dblink_1.type = '') - ); - """ - ), - # - # The following sql statement: - # 1) selects all links that - # - join JobCalculation (or subclass) or InlineCalculation as input - # - joins Data (or subclass) as output. - # - has no type (null) - # 2) set for these links the type to 'createlink' - migrations.RunSQL( - """ - UPDATE db_dblink set type='createlink' where id in ( - SELECT db_dblink_1.id - FROM db_dbnode AS db_dbnode_1 - JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id - JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id - WHERE db_dbnode_2.type LIKE 'data.%' - AND ( - db_dbnode_1.type LIKE 'calculation.job.%' - OR - db_dbnode_1.type = 'calculation.inline.InlineCalculation.' 
- ) - AND ( db_dblink_1.type = null OR db_dblink_1.type = '') - ); - """ - ), - # The following sql statement: - # 1) selects all links that - # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked - # - join Data (or subclass) as output. - # - has no type (null) - # 2) set for these links the type to 'returnlink' - migrations.RunSQL( - """ - UPDATE db_dblink set type='returnlink' where id in ( - SELECT db_dblink_1.id - FROM db_dbnode AS db_dbnode_1 - JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id - JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id - WHERE db_dbnode_2.type LIKE 'data.%' - AND db_dbnode_1.type = 'calculation.work.WorkCalculation.' - AND ( db_dblink_1.type = null OR db_dblink_1.type = '') - ); - """ - ), - # Now I update links that are CALLS: - # The following sql statement: - # 1) selects all links that - # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked - # - join Calculation (or subclass) as output. Includes JobCalculation and WorkCalculations and all subclasses. - # - has no type (null) - # 2) set for these links the type to 'calllink' - migrations.RunSQL( - """ - UPDATE db_dblink set type='calllink' where id in ( - SELECT db_dblink_1.id - FROM db_dbnode AS db_dbnode_1 - JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id - JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id - WHERE db_dbnode_1.type = 'calculation.work.WorkCalculation.' - AND db_dbnode_2.type LIKE 'calculation.%' - AND ( db_dblink_1.type = null OR db_dblink_1.type = '') - ); - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0008_code_hidden_to_extra.py b/aiida/backends/djsite/db/migrations/0008_code_hidden_to_extra.py deleted file mode 100644 index 604433f79f..0000000000 --- a/aiida/backends/djsite/db/migrations/0008_code_hidden_to_extra.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.8' -DOWN_REVISION = '1.0.7' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0007_update_linktypes'), - ] - - operations = [ - # The 'hidden' property of AbstractCode has been changed from an attribute to an extra - # Therefore we find all nodes of type Code and if they have an attribute with the key 'hidden' - # we move that value to the extra table - # - # First we copy the 'hidden' attributes from code.Code. 
nodes to the db_extra table
-        migrations.RunSQL(
-            """
-            INSERT INTO db_dbextra (key, datatype, tval, fval, ival, bval, dval, dbnode_id) (
-                SELECT db_dbattribute.key, db_dbattribute.datatype, db_dbattribute.tval, db_dbattribute.fval,
-                db_dbattribute.ival, db_dbattribute.bval, db_dbattribute.dval, db_dbattribute.dbnode_id
-                FROM db_dbattribute JOIN db_dbnode ON db_dbnode.id = db_dbattribute.dbnode_id
-                WHERE db_dbattribute.key = 'hidden'
-                    AND db_dbnode.type = 'code.Code.'
-            );
-            """
-        ),
-        # Secondly, we delete the original entries from the DbAttribute table
-        migrations.RunSQL(
-            """
-            DELETE FROM db_dbattribute
-            WHERE id in (
-                SELECT db_dbattribute.id
-                FROM db_dbattribute
-                JOIN db_dbnode ON db_dbnode.id = db_dbattribute.dbnode_id
-                WHERE db_dbattribute.key = 'hidden' AND db_dbnode.type = 'code.Code.'
-            );
-            """
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0009_base_data_plugin_type_string.py b/aiida/backends/djsite/db/migrations/0009_base_data_plugin_type_string.py
deleted file mode 100644
index b270529c38..0000000000
--- a/aiida/backends/djsite/db/migrations/0009_base_data_plugin_type_string.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Database migration."""
-from django.db import migrations
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-
-REVISION = '1.0.9'
-DOWN_REVISION = '1.0.8'
-
-
-class Migration(migrations.Migration):
-    """Database migration."""
-
-    dependencies = [
-        ('db', '0008_code_hidden_to_extra'),
-    ]
-
-    operations = [
-        # The base Data types Bool, Float, Int and Str have been moved in the source code, which means that their
-        # module path changes, which determines the plugin type string which is stored in the database.
-        # The type string now will have a type string prefix that is unique to each sub type.
-        migrations.RunSQL(
-            """
-            UPDATE db_dbnode SET type = 'data.bool.Bool.' WHERE type = 'data.base.Bool.';
-            UPDATE db_dbnode SET type = 'data.float.Float.' WHERE type = 'data.base.Float.';
-            UPDATE db_dbnode SET type = 'data.int.Int.' WHERE type = 'data.base.Int.';
-            UPDATE db_dbnode SET type = 'data.str.Str.' WHERE type = 'data.base.Str.';
-            UPDATE db_dbnode SET type = 'data.list.List.' WHERE type = 'data.base.List.';
-            """
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0011_delete_kombu_tables.py b/aiida/backends/djsite/db/migrations/0011_delete_kombu_tables.py
deleted file mode 100644
index b723de87e0..0000000000
--- a/aiida/backends/djsite/db/migrations/0011_delete_kombu_tables.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
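
The 0008/0009 pair above shows the raw-SQL idiom these Django migrations rely on: a fixed set of old-to-new `type` strings applied with plain UPDATE statements. The following standalone sketch is not part of aiida-core; psycopg2 and the throwaway DSN are assumptions of the example. It applies the 0009-style remap and spot-checks that no old type string is left behind.

```python
# Illustrative sketch only -- not part of aiida-core.
import psycopg2

RENAMES = {
    'data.base.Bool.': 'data.bool.Bool.',
    'data.base.Float.': 'data.float.Float.',
    'data.base.Int.': 'data.int.Int.',
    'data.base.Str.': 'data.str.Str.',
    'data.base.List.': 'data.list.List.',
}


def remap_type_strings(dsn='dbname=scratch user=postgres host=localhost'):
    """Apply each rename, then verify that no row still carries an old type string."""
    with psycopg2.connect(dsn) as connection:  # commits on clean exit
        with connection.cursor() as cursor:
            for old, new in RENAMES.items():
                cursor.execute('UPDATE db_dbnode SET type = %s WHERE type = %s', (new, old))
            cursor.execute('SELECT COUNT(*) FROM db_dbnode WHERE type = ANY(%s)', (list(RENAMES),))
            leftover = cursor.fetchone()[0]
            assert leftover == 0, f'{leftover} rows still carry an old type string'
```
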
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.11' -DOWN_REVISION = '1.0.10' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0010_process_type'), - ] - - operations = [ - migrations.RunSQL( - """ - DROP TABLE IF EXISTS kombu_message; - DROP TABLE IF EXISTS kombu_queue; - DELETE FROM db_dbsetting WHERE key = 'daemon|user'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|retriever'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|retriever'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|updater'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|updater'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|submitter'; - DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|submitter'; - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0013_django_1_8.py b/aiida/backends/djsite/db/migrations/0013_django_1_8.py deleted file mode 100644 index 32265ea1ee..0000000000 --- a/aiida/backends/djsite/db/migrations/0013_django_1_8.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations, models - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.13' -DOWN_REVISION = '1.0.12' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0012_drop_dblock'), - ] - - # An amalgamation from django:django/contrib/auth/migrations/ - # these changes are already the default for SQLA at this point - operations = [ - migrations.AlterField( - model_name='dbuser', - name='last_login', - field=models.DateTimeField(null=True, verbose_name='last login', blank=True), - ), - migrations.AlterField( - model_name='dbuser', - name='email', - field=models.EmailField(max_length=254, verbose_name='email address', blank=True), - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py b/aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py deleted file mode 100644 index f72fea9931..0000000000 --- a/aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Add a uniqueness constraint to the uuid column of DbNode table.""" - -from django.db import migrations, models - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.common.utils import get_new_uuid - -REVISION = '1.0.14' -DOWN_REVISION = '1.0.13' - - -def verify_node_uuid_uniqueness(_, __): - """Check whether the database contains nodes with duplicate UUIDS. - - Note that we have to redefine this method from aiida.manage.database.integrity.verify_node_uuid_uniqueness - because the migrations.RunPython command that will invoke this function, will pass two arguments and therefore - this wrapper needs to have a different function signature. - - :raises: IntegrityError if database contains nodes with duplicate UUIDS. - """ - from aiida.backends.general.migrations.duplicate_uuids import verify_uuid_uniqueness - verify_uuid_uniqueness(table='db_dbnode') - - -def reverse_code(_, __): - pass - - -class Migration(migrations.Migration): - """Add a uniqueness constraint to the uuid column of DbNode table.""" - - dependencies = [ - ('db', '0013_django_1_8'), - ] - - operations = [ - migrations.RunPython(verify_node_uuid_uniqueness, reverse_code=reverse_code), - migrations.AlterField( - model_name='dbnode', - name='uuid', - field=models.CharField(max_length=36, default=get_new_uuid, unique=True), - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py b/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py deleted file mode 100644 index 75f6ba2e8e..0000000000 --- a/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
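
The uniqueness check that 0014 runs before adding the UNIQUE constraint boils down to a single aggregate query. Below is a minimal sketch of that pre-flight check; psycopg2 and the DSN are assumptions of this example, and the table name defaults to the one used in the migration.

```python
# Minimal sketch of the duplicate-UUID pre-flight check, not AiiDA's own code.
import psycopg2


def find_duplicate_uuids(dsn, table='db_dbnode'):
    """Return (uuid, count) pairs for every UUID that occurs more than once."""
    with psycopg2.connect(dsn) as connection:
        with connection.cursor() as cursor:
            cursor.execute(f'SELECT uuid, COUNT(*) FROM {table} GROUP BY uuid HAVING COUNT(*) > 1')
            return cursor.fetchall()
```

A non-empty result is precisely the situation the migration turns into an IntegrityError, since adding the constraint would otherwise fail halfway through.
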
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Invalidating node hash - User should rehash nodes for caching.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.15' -DOWN_REVISION = '1.0.14' - -# Currently valid hash key -_HASH_EXTRA_KEY = '_aiida_hash' - - -class Migration(migrations.Migration): - """Invalidating node hash - User should rehash nodes for caching""" - - dependencies = [ - ('db', '0014_add_node_uuid_unique_constraint'), - ] - - operations = [ - migrations.RunSQL( - f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';", - reverse_sql=f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';" - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0016_code_sub_class_of_data.py b/aiida/backends/djsite/db/migrations/0016_code_sub_class_of_data.py deleted file mode 100644 index fa3ee78a73..0000000000 --- a/aiida/backends/djsite/db/migrations/0016_code_sub_class_of_data.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.16' -DOWN_REVISION = '1.0.15' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0015_invalidating_node_hash'), - ] - - operations = [ - # The Code class used to be just a sub class of Node but was changed to act like a Data node. - # To make everything fully consistent, its type string should therefore also start with `data.` - migrations.RunSQL( - sql="""UPDATE db_dbnode SET type = 'data.code.Code.' WHERE type = 'code.Code.';""", - reverse_sql="""UPDATE db_dbnode SET type = 'code.Code.' WHERE type = 'data.code.Code.';""" - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0018_django_1_11.py b/aiida/backends/djsite/db/migrations/0018_django_1_11.py deleted file mode 100644 index c97f8f0ff8..0000000000 --- a/aiida/backends/djsite/db/migrations/0018_django_1_11.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
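
Migration 0015 above can simply delete the cached `_aiida_hash` extras because a node's hash is a pure function of its content and can be recomputed on demand. The toy illustration below captures that reasoning; it is not AiiDA's actual hashing implementation.

```python
# Toy illustration only -- not AiiDA's hashing code.
import hashlib
import json

HASH_EXTRA_KEY = '_aiida_hash'  # the same key the migration deletes


def compute_hash(attributes: dict) -> str:
    """Hash a deterministic serialization so equal content always hashes equally."""
    payload = json.dumps(attributes, sort_keys=True).encode('utf-8')
    return hashlib.sha256(payload).hexdigest()


extras = {HASH_EXTRA_KEY: compute_hash({'x': 1}), 'other': 'kept'}
extras.pop(HASH_EXTRA_KEY, None)  # the migration's DELETE, in miniature
assert compute_hash({'x': 1}) == compute_hash({'x': 1})  # recomputable at will
```
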
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# Generated by Django 1.11.16 on 2018-11-12 16:46 -# pylint: disable=invalid-name -"""Migration for upgrade to django 1.11""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations, models - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -import aiida.common.utils - -REVISION = '1.0.18' -DOWN_REVISION = '1.0.17' - -tables = ['db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbworkflow'] - - -def _verify_uuid_uniqueness(apps, schema_editor): - """Check whether the respective tables contain rows with duplicate UUIDS. - - Note that we have to redefine this method from aiida.manage.database.integrity - because the migrations.RunPython command that will invoke this function, will pass two arguments and therefore - this wrapper needs to have a different function signature. - - :raises: IntegrityError if database contains rows with duplicate UUIDS. - """ - # pylint: disable=unused-argument - from aiida.backends.general.migrations.duplicate_uuids import verify_uuid_uniqueness - - for table in tables: - verify_uuid_uniqueness(table=table) - - -def reverse_code(apps, schema_editor): - # pylint: disable=unused-argument - pass - - -class Migration(migrations.Migration): - """Migration for upgrade to django 1.11 - - This migration switches from the django_extensions UUID field to the - native UUIDField of django 1.11 - - It also introduces unique constraints on all uuid columns - (previously existed only on dbnode). - """ - - dependencies = [ - ('db', '0017_drop_dbcalcstate'), - ] - - operations = [ - migrations.RunPython(_verify_uuid_uniqueness, reverse_code=reverse_code), - migrations.AlterField( - model_name='dbcomment', - name='uuid', - field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid), - ), - migrations.AlterField( - model_name='dbcomputer', - name='uuid', - field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid), - ), - migrations.AlterField( - model_name='dbgroup', - name='uuid', - field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid), - ), - # first: remove index - migrations.AlterField( - model_name='dbnode', - name='uuid', - field=models.CharField(max_length=36, default=aiida.common.utils.get_new_uuid, unique=False), - ), - # second: switch to UUIDField - migrations.AlterField( - model_name='dbnode', - name='uuid', - field=models.UUIDField(default=aiida.common.utils.get_new_uuid, unique=True), - ), - migrations.AlterField( - model_name='dbuser', - name='email', - field=models.EmailField(db_index=True, max_length=254, unique=True), - ), - migrations.AlterField( - model_name='dbuser', - name='groups', - field=models.ManyToManyField( - blank=True, - help_text= - 'The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', - related_name='user_set', - related_query_name='user', - to='auth.Group', - verbose_name='groups' - ), - ), - migrations.AlterField( - model_name='dbworkflow', - name='uuid', - field=models.UUIDField(unique=True, default=aiida.common.utils.get_new_uuid), - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0019_migrate_builtin_calculations.py b/aiida/backends/djsite/db/migrations/0019_migrate_builtin_calculations.py deleted file mode 100644 index e5cabcf50b..0000000000 --- a/aiida/backends/djsite/db/migrations/0019_migrate_builtin_calculations.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Migration to reflect the name change of the built in calculation entry points in the database.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.19' -DOWN_REVISION = '1.0.18' - - -class Migration(migrations.Migration): - """Migration to remove entry point groups from process type strings and prefix unknown types with a marker.""" - - dependencies = [ - ('db', '0018_django_1_11'), - ] - - operations = [ - # The built in calculation plugins `arithmetic.add` and `templatereplacer` have been moved and their entry point - # renamed. In the change the `simpleplugins` namespace was dropped so we migrate the existing nodes. - migrations.RunSQL( - sql=""" - UPDATE db_dbnode SET type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.' - WHERE type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.'; - - UPDATE db_dbnode SET type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.' - WHERE type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:arithmetic.add' - WHERE process_type = 'aiida.calculations:simpleplugins.arithmetic.add'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:templatereplacer' - WHERE process_type = 'aiida.calculations:simpleplugins.templatereplacer'; - - UPDATE db_dbattribute AS a SET tval = 'arithmetic.add' - FROM db_dbnode AS n WHERE a.dbnode_id = n.id - AND a.key = 'input_plugin' - AND a.tval = 'simpleplugins.arithmetic.add' - AND n.type = 'data.code.Code.'; - - UPDATE db_dbattribute AS a SET tval = 'templatereplacer' - FROM db_dbnode AS n WHERE a.dbnode_id = n.id - AND a.key = 'input_plugin' - AND a.tval = 'simpleplugins.templatereplacer' - AND n.type = 'data.code.Code.'; - """, - reverse_sql=""" - UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.' - WHERE type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.'; - - UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.' 
- WHERE type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.arithmetic.add' - WHERE process_type = 'aiida.calculations:arithmetic.add'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.templatereplacer' - WHERE process_type = 'aiida.calculations:templatereplacer'; - - UPDATE db_dbattribute AS a SET tval = 'simpleplugins.arithmetic.add' - FROM db_dbnode AS n WHERE a.dbnode_id = n.id - AND a.key = 'input_plugin' - AND a.tval = 'arithmetic.add' - AND n.type = 'data.code.Code.'; - - UPDATE db_dbattribute AS a SET tval = 'simpleplugins.templatereplacer' - FROM db_dbnode AS n WHERE a.dbnode_id = n.id - AND a.key = 'input_plugin' - AND a.tval = 'templatereplacer' - AND n.type = 'data.code.Code.'; - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0020_provenance_redesign.py b/aiida/backends/djsite/db/migrations/0020_provenance_redesign.py deleted file mode 100644 index cd12c05a9f..0000000000 --- a/aiida/backends/djsite/db/migrations/0020_provenance_redesign.py +++ /dev/null @@ -1,202 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name,unused-argument -"""Migration after the provenance redesign""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.20' -DOWN_REVISION = '1.0.19' - - -def migrate_infer_calculation_entry_point(apps, schema_editor): - """Set the process type for calculation nodes by inferring it from their type string.""" - from aiida.manage.database.integrity import write_database_integrity_violation - from aiida.manage.database.integrity.plugins import infer_calculation_entry_point - from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR - - fallback_cases = [] - DbNode = apps.get_model('db', 'DbNode') - - type_strings = DbNode.objects.filter(type__startswith='calculation.').values_list('type', flat=True) - mapping_node_type_to_entry_point = infer_calculation_entry_point(type_strings=type_strings) - - for type_string, entry_point_string in mapping_node_type_to_entry_point.items(): - - # If the entry point string does not contain the entry point string separator, the mapping function was not able - # to map the type string onto a known entry point string. As a fallback it uses the modified type string itself. - # All affected entries should be logged to file that the user can consult. 
- if ENTRY_POINT_STRING_SEPARATOR not in entry_point_string: - query_set = DbNode.objects.filter(type=type_string).values_list('uuid') - uuids = [str(entry[0]) for entry in query_set] - for uuid in uuids: - fallback_cases.append([uuid, type_string, entry_point_string]) - - DbNode.objects.filter(type=type_string).update(process_type=entry_point_string) - - if fallback_cases: - headers = ['UUID', 'type (old)', 'process_type (fallback)'] - warning_message = 'found calculation nodes with a type string that could not be mapped onto a known entry point' - action_message = 'inferred `process_type` for all calculation nodes, using fallback for unknown entry points' - write_database_integrity_violation(fallback_cases, headers, warning_message, action_message) - - -def detect_unexpected_links(apps, schema_editor): - """Scan the database for any links that are unexpected. - - The checks will verify that there are no outgoing `call` or `return` links from calculation nodes and that if a - workflow node has a `create` link, it has at least an accompanying return link to the same data node, or it has a - `call` link to a calculation node that takes the created data node as input. - """ - from aiida.backends.general.migrations.provenance_redesign import INVALID_LINK_SELECT_STATEMENTS - from aiida.manage.database.integrity import write_database_integrity_violation - - with schema_editor.connection.cursor() as cursor: - - for sql, warning_message in INVALID_LINK_SELECT_STATEMENTS: - cursor.execute(sql) - results = cursor.fetchall() - if results: - headers = ['UUID source', 'UUID target', 'link type', 'link label'] - write_database_integrity_violation(results, headers, warning_message) - - -def reverse_code(apps, schema_editor): - """Reversing the inference of the process type is not possible and not necessary.""" - - -class Migration(migrations.Migration): - """Migration to effectuate changes introduced by the provenance redesign - - This includes in order: - - * Rename the type column of process nodes - * Remove illegal links - * Rename link types - - The exact reverse operation is not possible because the renaming of the type string of `JobCalculation` nodes is - done in a lossy way. Originally this type string contained the exact sub class of the `JobCalculation` but in the - migration this is changed to always be `node.process.calculation.calcjob.CalcJobNode.`. In the reverse operation, - this can then only be reset to `calculation.job.JobCalculation.` but the information on the exact sub class is lost. 
- """ - dependencies = [ - ('db', '0019_migrate_builtin_calculations'), - ] - - operations = [ - migrations.RunPython(migrate_infer_calculation_entry_point, reverse_code=reverse_code, atomic=True), - migrations.RunPython(detect_unexpected_links, reverse_code=reverse_code, atomic=True), - migrations.RunSQL( - """ - DELETE FROM db_dblink WHERE db_dblink.id IN ( - SELECT db_dblink.id FROM db_dblink - INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id - WHERE - (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%') - AND db_dblink.type = 'returnlink' - ); -- Delete all outgoing RETURN links from JobCalculation and InlineCalculation nodes - - DELETE FROM db_dblink WHERE db_dblink.id IN ( - SELECT db_dblink.id FROM db_dblink - INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id - WHERE - (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%') - AND db_dblink.type = 'calllink' - ); -- Delete all outgoing CALL links from JobCalculation and InlineCalculation nodes - - DELETE FROM db_dblink WHERE db_dblink.id IN ( - SELECT db_dblink.id FROM db_dblink - INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id - WHERE - (db_dbnode.type LIKE 'calculation.function%' OR db_dbnode.type LIKE 'calculation.work%') - AND db_dblink.type = 'createlink' - ); -- Delete all outgoing CREATE links from FunctionCalculation and WorkCalculation nodes - - UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.' - WHERE type = 'calculation.process.ProcessCalculation.'; - -- First migrate very old `ProcessCalculation` to `WorkCalculation` - - UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.' FROM db_dbattribute - WHERE db_dbattribute.dbnode_id = db_dbnode.id - AND type = 'calculation.work.WorkCalculation.' - AND db_dbattribute.key = 'function_name'; - -- WorkCalculations that have a `function_name` attribute are FunctionCalculations - - UPDATE db_dbnode SET type = 'node.process.workflow.workchain.WorkChainNode.' - WHERE type = 'calculation.work.WorkCalculation.'; - -- Update type for `WorkCalculation` nodes - all what is left should be `WorkChainNodes` - - UPDATE db_dbnode SET type = 'node.process.calculation.calcjob.CalcJobNode.' - WHERE type LIKE 'calculation.job.%'; -- Update type for JobCalculation nodes - - UPDATE db_dbnode SET type = 'node.process.calculation.calcfunction.CalcFunctionNode.' - WHERE type = 'calculation.inline.InlineCalculation.'; -- Update type for InlineCalculation nodes - - UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.' 
-            WHERE type = 'calculation.function.FunctionCalculation.'; -- Update type for FunctionCalculation nodes
-
-            UPDATE db_dblink SET type = 'create' WHERE type = 'createlink'; -- Rename `createlink` to `create`
-            UPDATE db_dblink SET type = 'return' WHERE type = 'returnlink'; -- Rename `returnlink` to `return`
-
-            UPDATE db_dblink SET type = 'input_calc' FROM db_dbnode
-            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%'
-            AND db_dblink.type = 'inputlink';
-            -- Rename `inputlink` to `input_calc` if the target node is a calculation type node
-
-            UPDATE db_dblink SET type = 'input_work' FROM db_dbnode
-            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%'
-            AND db_dblink.type = 'inputlink';
-            -- Rename `inputlink` to `input_work` if the target node is a workflow type node
-
-            UPDATE db_dblink SET type = 'call_calc' FROM db_dbnode
-            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%'
-            AND db_dblink.type = 'calllink';
-            -- Rename `calllink` to `call_calc` if the target node is a calculation type node
-
-            UPDATE db_dblink SET type = 'call_work' FROM db_dbnode
-            WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%'
-            AND db_dblink.type = 'calllink';
-            -- Rename `calllink` to `call_work` if the target node is a workflow type node
-
-            """,
-            reverse_sql="""
-            UPDATE db_dbnode SET type = 'calculation.job.JobCalculation.'
-            WHERE type = 'node.process.calculation.calcjob.CalcJobNode.';
-
-            UPDATE db_dbnode SET type = 'calculation.inline.InlineCalculation.'
-            WHERE type = 'node.process.calculation.calcfunction.CalcFunctionNode.';
-
-            UPDATE db_dbnode SET type = 'calculation.function.FunctionCalculation.'
-            WHERE type = 'node.process.workflow.workfunction.WorkFunctionNode.';
-
-            UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.'
-            WHERE type = 'node.process.workflow.workchain.WorkChainNode.';
-
-
-            UPDATE db_dblink SET type = 'inputlink'
-            WHERE type = 'input_calc' OR type = 'input_work';
-
-            UPDATE db_dblink SET type = 'calllink'
-            WHERE type = 'call_calc' OR type = 'call_work';
-
-            UPDATE db_dblink SET type = 'createlink'
-            WHERE type = 'create';
-
-            UPDATE db_dblink SET type = 'returnlink'
-            WHERE type = 'return';
-
-            """
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0021_dbgroup_name_to_label_type_to_type_string.py b/aiida/backends/djsite/db/migrations/0021_dbgroup_name_to_label_type_to_type_string.py
deleted file mode 100644
index 24f2564f58..0000000000
--- a/aiida/backends/djsite/db/migrations/0021_dbgroup_name_to_label_type_to_type_string.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
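
A detail worth noting in the provenance-redesign migration above: its `reverse_sql` must fold the new link types back onto exactly the old type the forward SQL split them from (`inputlink` fans out to `input_calc`/`input_work`, `calllink` to `call_calc`/`call_work`). The small self-check below is illustrative only, not part of the migration; it states that inversion property explicitly.

```python
# Illustrative consistency check, not part of the migration itself.
FORWARD = {
    'createlink': ['create'],
    'returnlink': ['return'],
    'inputlink': ['input_calc', 'input_work'],
    'calllink': ['call_calc', 'call_work'],
}

# Build the reverse mapping and verify every new type maps back to its origin.
REVERSE = {new: old for old, new_types in FORWARD.items() for new in new_types}

for old, new_types in FORWARD.items():
    for new in new_types:
        assert REVERSE[new] == old, f'{new!r} does not map back to {old!r}'
```
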
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Migration that renames name and type columns to label and type_string""" - -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.21' -DOWN_REVISION = '1.0.20' - - -class Migration(migrations.Migration): - """Migration that renames name and type columns to label and type_string""" - - dependencies = [ - ('db', '0020_provenance_redesign'), - ] - - operations = [ - migrations.RenameField( - model_name='dbgroup', - old_name='name', - new_name='label', - ), - migrations.RenameField( - model_name='dbgroup', - old_name='type', - new_name='type_string', - ), - migrations.AlterUniqueTogether( - name='dbgroup', - unique_together=set([('label', 'type_string')]), - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0023_calc_job_option_attribute_keys.py b/aiida/backends/djsite/db/migrations/0023_calc_job_option_attribute_keys.py deleted file mode 100644 index eba7254e54..0000000000 --- a/aiida/backends/djsite/db/migrations/0023_calc_job_option_attribute_keys.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Migration of ProcessNode attributes for metadata options whose key changed.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.23' -DOWN_REVISION = '1.0.22' - - -class Migration(migrations.Migration): - """Migration of ProcessNode attributes for metadata options whose key changed. - - Renamed attribute keys: - - * `custom_environment_variables` -> `environment_variables` (CalcJobNode) - * `jobresource_params` -> `resources` (CalcJobNode) - * `_process_label` -> `process_label` (ProcessNode) - * `parser` -> `parser_name` (CalcJobNode) - - Deleted attributes: - * `linkname_retrieved` (We do not actually delete it just in case some relies on it) - - """ - - dependencies = [ - ('db', '0022_dbgroup_type_string_change_content'), - ] - - operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^custom_environment_variables', 'environment_variables') - FROM db_dbnode AS node - WHERE - ( - attribute.key = 'custom_environment_variables' OR - attribute.key LIKE 'custom\_environment\_variables.%' - ) AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' 
AND - node.id = attribute.dbnode_id; - -- custom_environment_variables -> environment_variables - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^jobresource_params', 'resources') - FROM db_dbnode AS node - WHERE - ( - attribute.key = 'jobresource_params' OR - attribute.key LIKE 'jobresource\_params.%' - ) AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND - node.id = attribute.dbnode_id; - -- jobresource_params -> resources - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^_process_label', 'process_label') - FROM db_dbnode AS node - WHERE - attribute.key = '_process_label' AND - node.type LIKE 'node.process.%' AND - node.id = attribute.dbnode_id; - -- _process_label -> process_label - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^parser', 'parser_name') - FROM db_dbnode AS node - WHERE - attribute.key = 'parser' AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND - node.id = attribute.dbnode_id; - -- parser -> parser_name - """, - reverse_sql=r""" - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^environment_variables', 'custom_environment_variables') - FROM db_dbnode AS node - WHERE - ( - attribute.key = 'environment_variables' OR - attribute.key LIKE 'environment\_variables.%' - ) AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND - node.id = attribute.dbnode_id; - -- environment_variables -> custom_environment_variables - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^resources', 'jobresource_params') - FROM db_dbnode AS node - WHERE - ( - attribute.key = 'resources' OR - attribute.key LIKE 'resources.%' - ) AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND - node.id = attribute.dbnode_id; - -- resources -> jobresource_params - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^process_label', '_process_label') - FROM db_dbnode AS node - WHERE - attribute.key = 'process_label' AND - node.type LIKE 'node.process.%' AND - node.id = attribute.dbnode_id; - -- process_label -> _process_label - - UPDATE db_dbattribute AS attribute - SET key = regexp_replace(attribute.key, '^parser_name', 'parser') - FROM db_dbnode AS node - WHERE - attribute.key = 'parser_name' AND - node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND - node.id = attribute.dbnode_id; - -- parser_name -> parser - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0024_dblog_update.py b/aiida/backends/djsite/db/migrations/0024_dblog_update.py deleted file mode 100644 index f4b78a2ce6..0000000000 --- a/aiida/backends/djsite/db/migrations/0024_dblog_update.py +++ /dev/null @@ -1,362 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# Generated by Django 1.11.16 on 2018-12-21 10:56 -# pylint: disable=invalid-name -"""Migration for the update of the DbLog table. 
Addition of uuids""" -import sys - -import click -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations, models - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.backends.general.migrations.utils import dumps_json -from aiida.cmdline.utils import echo -from aiida.common.utils import get_new_uuid -from aiida.manage import configuration - -REVISION = '1.0.24' -DOWN_REVISION = '1.0.23' - -# The values that will be exported for the log records that will be deleted -values_to_export = ['id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata'] - -node_prefix = 'node.' -leg_workflow_prefix = 'aiida.workflows.user.' - - -def get_legacy_workflow_log_number(schema_editor): - """ Get the number of the log records that correspond to legacy workflows """ - with schema_editor.connection.cursor() as cursor: - cursor.execute( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname LIKE 'aiida.workflows.user.%') - """ - ) - return cursor.fetchall()[0][0] - - -def get_unknown_entity_log_number(schema_editor): - """ Get the number of the log records that correspond to unknown entities """ - with schema_editor.connection.cursor() as cursor: - cursor.execute( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname NOT LIKE 'node.%') AND - (db_dblog.objname NOT LIKE 'aiida.workflows.user.%') - """ - ) - return cursor.fetchall()[0][0] - - -def get_logs_with_no_nodes_number(schema_editor): - """ Get the number of the log records that don't correspond to a node """ - with schema_editor.connection.cursor() as cursor: - cursor.execute( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname LIKE 'node.%') AND NOT EXISTS - (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) - """ - ) - return cursor.fetchall()[0][0] - - -def get_serialized_legacy_workflow_logs(schema_editor): - """ Get the serialized log records that correspond to legacy workflows """ - with schema_editor.connection.cursor() as cursor: - cursor.execute(( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, - db_dblog.objname, db_dblog.message, db_dblog.metadata FROM db_dblog - WHERE - (db_dblog.objname LIKE 'aiida.workflows.user.%') - """ - )) - keys = ['id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata'] - res = [] - for row in cursor.fetchall(): - res.append(dict(list(zip(keys, row)))) - return dumps_json(res) - - -def get_serialized_unknown_entity_logs(schema_editor): - """ Get the serialized log records that correspond to unknown entities """ - with schema_editor.connection.cursor() as cursor: - cursor.execute(( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, - db_dblog.objname, db_dblog.message, db_dblog.metadata FROM db_dblog - WHERE - (db_dblog.objname NOT LIKE 'node.%') AND - (db_dblog.objname NOT LIKE 'aiida.workflows.user.%') - """ - )) - keys = ['id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata'] - res = [] - for row in cursor.fetchall(): - res.append(dict(list(zip(keys, row)))) - return dumps_json(res) - - -def get_serialized_logs_with_no_nodes(schema_editor): - """ Get the serialized log records that don't correspond to a node """ - with schema_editor.connection.cursor() as cursor: - cursor.execute(( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, 
db_dblog.levelname, db_dblog.objpk,
-                db_dblog.objname, db_dblog.message, db_dblog.metadata FROM db_dblog
-            WHERE
-                (db_dblog.objname LIKE 'node.%') AND NOT EXISTS
-                (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1)
-            """
-        ))
-        keys = ['id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata']
-        res = []
-        for row in cursor.fetchall():
-            res.append(dict(list(zip(keys, row))))
-    return dumps_json(res)
-
-
-def set_new_uuid(apps, _):
-    """
-    Set new UUIDs for all logs
-    """
-    DbLog = apps.get_model('db', 'DbLog')
-    query_set = DbLog.objects.all()
-    for log in query_set.iterator():
-        log.uuid = get_new_uuid()
-        log.save(update_fields=['uuid'])
-
-
-def export_and_clean_workflow_logs(apps, schema_editor):
-    """
-    Export the log records that correspond to legacy workflows and to unknown entities.
-    """
-    from tempfile import NamedTemporaryFile
-
-    DbLog = apps.get_model('db', 'DbLog')
-
-    lwf_number = get_legacy_workflow_log_number(schema_editor)
-    other_number = get_unknown_entity_log_number(schema_editor)
-    log_no_node_number = get_logs_with_no_nodes_number(schema_editor)
-
-    # If there are no legacy workflow log records or log records of an unknown entity
-    if lwf_number == 0 and other_number == 0 and log_no_node_number == 0:
-        return
-
-    if not configuration.PROFILE.is_test_profile:
-        echo.echo_warning(
-            'We found {} log records that correspond to legacy workflows and {} log records that correspond '
-            'to an unknown entity.'.format(lwf_number, other_number)
-        )
-        echo.echo_warning(
-            'These records will be removed from the database and exported to JSON files in the current directory.'
-        )
-        proceed = click.confirm('Would you like to proceed?', default=True)
-        if not proceed:
-            sys.exit(1)
-
-    delete_on_close = configuration.PROFILE.is_test_profile
-
-    # Exporting the legacy workflow log records
-    if lwf_number != 0:
-        # Get the records and write them to file
-        with NamedTemporaryFile(
-            prefix='legacy_wf_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+'
-        ) as handle:
-            filename = handle.name
-            handle.write(get_serialized_legacy_workflow_logs(schema_editor))
-
-            # If delete_on_close is False, we are running for the user and add additional message of file location
-            if not delete_on_close:
-                echo.echo(f'Exported legacy workflow logs to {filename}')
-
-        # Now delete the records
-        DbLog.objects.filter(objname__startswith=leg_workflow_prefix).delete()
-        with schema_editor.connection.cursor() as cursor:
-            cursor.execute((
-                """
-                DELETE FROM db_dblog
-                WHERE
-                    (db_dblog.objname LIKE 'aiida.workflows.user.%')
-                """
-            ))
-
-    # Exporting unknown log records
-    if other_number != 0:
-        # Get the records and write them to file
-        with NamedTemporaryFile(
-            prefix='unknown_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+'
-        ) as handle:
-            filename = handle.name
-            handle.write(get_serialized_unknown_entity_logs(schema_editor))
-
-            # If delete_on_close is False, we are running for the user and add additional message of file location
-            if not delete_on_close:
-                echo.echo(f'Exported unexpected entity logs to {filename}')
-
-        # Now delete the records
-        DbLog.objects.exclude(objname__startswith=node_prefix).exclude(objname__startswith=leg_workflow_prefix).delete()
-        with schema_editor.connection.cursor() as cursor:
-            cursor.execute((
-                """
-                DELETE FROM db_dblog WHERE
-                    (db_dblog.objname NOT LIKE 'node.%') AND
-                    (db_dblog.objname NOT LIKE 'aiida.workflows.user.%')
-                """
-            ))
-
-    # Exporting log records that don't correspond
to nodes - if log_no_node_number != 0: - # Get the records and write them to file - with NamedTemporaryFile( - prefix='no_node_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+' - ) as handle: - filename = handle.name - handle.write(get_serialized_logs_with_no_nodes(schema_editor)) - - # If delete_on_close is False, we are running for the user and add additional message of file location - if not delete_on_close: - echo.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename)) - - # Now delete the records - with schema_editor.connection.cursor() as cursor: - cursor.execute(( - """ - DELETE FROM db_dblog WHERE - (db_dblog.objname LIKE 'node.%') AND NOT EXISTS - (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) - """ - )) - - -def clean_dblog_metadata(apps, _): - """ - Remove objpk and objname from the DbLog table metadata. - """ - import json - - DbLog = apps.get_model('db', 'DbLog') - query_set = DbLog.objects.all() - for log in query_set.iterator(): - met = json.loads(log.metadata) - if 'objpk' in met: - del met['objpk'] - if 'objname' in met: - del met['objname'] - log.metadata = json.dumps(met) - log.save(update_fields=['metadata']) - - -def enrich_dblog_metadata(apps, _): - """ - Add objpk and objname to the DbLog table metadata. - """ - import json - - DbLog = apps.get_model('db', 'DbLog') - query_set = DbLog.objects.all() - for log in query_set.iterator(): - met = json.loads(log.metadata) - if 'objpk' not in met: - met['objpk'] = log.objpk - if 'objname' not in met: - met['objname'] = log.objname - log.metadata = json.dumps(met) - log.save(update_fields=['metadata']) - - -class Migration(migrations.Migration): - """ - This migration updates the DbLog schema and adds UUID for correct export of the DbLog entries. - More specifically, it adds UUIDS, it exports to files the not needed log entries (that correspond - to legacy workflows and unknown entities), it creates a foreign key to the dbnode table, it - transfers there the objpk data to the new dbnode column (just altering the objpk column and making - it a foreign key when containing data, raised problems) and in the end objpk and objname columns - are removed. - """ - - dependencies = [ - ('db', '0023_calc_job_option_attribute_keys'), - ] - - operations = [ - # Export of the logs of the old workflows to a JSON file, there is no re-import - # for the reverse migrations - migrations.RunPython(export_and_clean_workflow_logs, reverse_code=migrations.RunPython.noop), - - # Removing objname and objpk from the metadata. 
The reverse migration adds the
-        # objname and objpk to the metadata
-        migrations.RunPython(clean_dblog_metadata, reverse_code=enrich_dblog_metadata),
-
-        # The forward migration will not do anything for the objname, the reverse
-        # migration will populate it with correct values
-        migrations.RunSQL(
-            '',
-            reverse_sql='UPDATE db_dblog SET objname=db_dbnode.type '
-            'FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk'
-        ),
-
-        # Removal of the column objname, the reverse migration will add it
-        migrations.RemoveField(model_name='dblog', name='objname'),
-
-        # Creation of a new column called dbnode which is a foreign key to the dbnode table
-        # The reverse migration will remove this column
-        migrations.AddField(
-            model_name='dblog',
-            name='dbnode',
-            field=models.ForeignKey(
-                on_delete=models.deletion.CASCADE, related_name='dblogs', to='db.DbNode', blank=True, null=True
-            ),
-        ),
-
-        # Transfer of the data from the objpk to the node field
-        # The reverse migration will do the inverse transfer
-        migrations.RunSQL('UPDATE db_dblog SET dbnode_id=objpk', reverse_sql='UPDATE db_dblog SET objpk=dbnode_id'),
-
-        # Now that all the data have been migrated, make the column not nullable and not blank.
-        # A log record should always correspond to a node record
-        migrations.AlterField(
-            model_name='dblog',
-            name='dbnode',
-            field=models.ForeignKey(on_delete=models.deletion.CASCADE, related_name='dblogs', to='db.DbNode'),
-        ),
-
-        # Since the new column is created correctly, drop the old objpk column
-        # The reverse migration will add the field
-        migrations.RemoveField(model_name='dblog', name='objpk'),
-
-        # This is the correct pattern to generate unique fields, see
-        # https://docs.djangoproject.com/en/1.11/howto/writing-migrations/#migrations-that-add-unique-fields
-        # The reverse migration will remove it
-        migrations.AddField(
-            model_name='dblog',
-            name='uuid',
-            field=models.UUIDField(default=get_new_uuid, null=True),
-        ),
-
-        # Add unique UUIDs to the UUID field. There is no need for a reverse migration for a field
-        # that will be deleted
-        migrations.RunPython(set_new_uuid, reverse_code=migrations.RunPython.noop),
-
-        # Changing the column to unique
-        migrations.AlterField(
-            model_name='dblog',
-            name='uuid',
-            field=models.UUIDField(default=get_new_uuid, unique=True),
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0025_move_data_within_node_module.py b/aiida/backends/djsite/db/migrations/0025_move_data_within_node_module.py
deleted file mode 100644
index 3df4f55e10..0000000000
--- a/aiida/backends/djsite/db/migrations/0025_move_data_within_node_module.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
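
The DbLog operations above follow Django's documented recipe for adding a unique field: add the column as nullable, backfill a value for every row, then alter the column to unique. Below is a minimal sketch of the backfill step, with plain dictionaries standing in for model instances (an assumption of this example).

```python
# Sketch of the "backfill then constrain" step -- not the migration's own code.
import uuid


def backfill_uuids(rows):
    """Give every row that lacks a UUID a fresh, effectively unique UUID4."""
    for row in rows:
        if row.get('uuid') is None:
            row['uuid'] = str(uuid.uuid4())


rows = [{'id': 1, 'uuid': None}, {'id': 2, 'uuid': None}]
backfill_uuids(rows)
assert len({row['uuid'] for row in rows}) == len(rows)  # safe to add UNIQUE now
```
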
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Data migration for `Data` nodes after it was moved in the `aiida.orm.node` module changing the type string.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.25' -DOWN_REVISION = '1.0.24' - - -class Migration(migrations.Migration): - """Data migration for `Data` nodes after it was moved in the `aiida.orm.node` module changing the type string.""" - - dependencies = [ - ('db', '0024_dblog_update'), - ] - - operations = [ - # The type string for `Data` nodes changed from `data.*` to `node.data.*`. - migrations.RunSQL( - sql=r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^data.', 'node.data.') - WHERE type LIKE 'data.%' - """, - reverse_sql=r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^node.data.', 'data.') - WHERE type LIKE 'node.data.%' - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0026_trajectory_symbols_to_attribute.py b/aiida/backends/djsite/db/migrations/0026_trajectory_symbols_to_attribute.py deleted file mode 100644 index 3d08c4cb88..0000000000 --- a/aiida/backends/djsite/db/migrations/0026_trajectory_symbols_to_attribute.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Data migration for `TrajectoryData` nodes where symbol lists are moved from repository array to attribute. - -This process has to be done in two separate consecutive migrations to prevent data loss in between. -""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-member,no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.backends.general.migrations.utils import load_numpy_array_from_repository - -from . 
import ModelModifierV0025
-
-REVISION = '1.0.26'
-DOWN_REVISION = '1.0.25'
-
-
-def create_trajectory_symbols_attribute(apps, _):
-    """Create the symbols attribute from the repository array for all `TrajectoryData` nodes."""
-    DbNode = apps.get_model('db', 'DbNode')
-    DbAttribute = apps.get_model('db', 'DbAttribute')
-
-    modifier = ModelModifierV0025(apps, DbAttribute)
-
-    nodes = DbNode.objects.filter(type='node.data.array.trajectory.TrajectoryData.').values_list('id', 'uuid')
-    for pk, uuid in nodes:
-        symbols = load_numpy_array_from_repository(uuid, 'symbols').tolist()
-        modifier.set_value_for_node(DbNode.objects.get(pk=pk), 'symbols', symbols)
-
-
-def delete_trajectory_symbols_attribute(apps, _):
-    """Delete the symbols attribute for all `TrajectoryData` nodes."""
-    DbNode = apps.get_model('db', 'DbNode')
-    DbAttribute = apps.get_model('db', 'DbAttribute')
-
-    modifier = ModelModifierV0025(apps, DbAttribute)
-
-    nodes = DbNode.objects.filter(type='node.data.array.trajectory.TrajectoryData.').values_list('id', flat=True)
-    for pk in nodes:
-        modifier.del_value_for_node(DbNode.objects.get(pk=pk), 'symbols')
-
-
-class Migration(migrations.Migration):
-    """Storing symbols in TrajectoryData nodes as attributes, while keeping numpy arrays.
-    TrajectoryData symbols arrays are deleted in the next migration.
-    We split the migration into two because every migration is wrapped in an atomic transaction and we want to avoid
-    deleting the data while it is being written to the database"""
-
-    dependencies = [
-        ('db', '0025_move_data_within_node_module'),
-    ]
-
-    operations = [
-        migrations.RunPython(create_trajectory_symbols_attribute, reverse_code=delete_trajectory_symbols_attribute),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
diff --git a/aiida/backends/djsite/db/migrations/0027_delete_trajectory_symbols_array.py b/aiida/backends/djsite/db/migrations/0027_delete_trajectory_symbols_array.py
deleted file mode 100644
index be1859b41f..0000000000
--- a/aiida/backends/djsite/db/migrations/0027_delete_trajectory_symbols_array.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=invalid-name
-"""Data migration for `TrajectoryData` nodes where symbol lists are moved from repository array to attribute.
-
-This process has to be done in two separate consecutive migrations to prevent data loss in between.
-"""
-
-# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed
-# pylint: disable=no-name-in-module,import-error
-from django.db import migrations
-
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
-from aiida.backends.general.migrations import utils
-
-from .
import ModelModifierV0025 - -REVISION = '1.0.27' -DOWN_REVISION = '1.0.26' - - -def delete_trajectory_symbols_array(apps, _): - """Delete the symbols array from all `TrajectoryData` nodes.""" - DbNode = apps.get_model('db', 'DbNode') - DbAttribute = apps.get_model('db', 'DbAttribute') - - modifier = ModelModifierV0025(apps, DbAttribute) - - nodes = DbNode.objects.filter(type='node.data.array.trajectory.TrajectoryData.').values_list('id', 'uuid') - for pk, uuid in nodes: - modifier.del_value_for_node(DbNode.objects.get(pk=pk), 'array|symbols') - utils.delete_numpy_array_from_repository(uuid, 'symbols') - - -def create_trajectory_symbols_array(apps, _): - """Create the symbols array for all `TrajectoryData` nodes.""" - import numpy - - DbNode = apps.get_model('db', 'DbNode') - DbAttribute = apps.get_model('db', 'DbAttribute') - - modifier = ModelModifierV0025(apps, DbAttribute) - - nodes = DbNode.objects.filter(type='node.data.array.trajectory.TrajectoryData.').values_list('id', 'uuid') - for pk, uuid in nodes: - symbols = numpy.array(modifier.get_value_for_node(pk, 'symbols')) - utils.store_numpy_array_in_repository(uuid, 'symbols', symbols) - modifier.set_value_for_node(DbNode.objects.get(pk=pk), 'array|symbols', list(symbols.shape)) - - -class Migration(migrations.Migration): - """Deleting duplicated information stored in TrajectoryData symbols numpy arrays""" - - dependencies = [ - ('db', '0026_trajectory_symbols_to_attribute'), - ] - - operations = [ - migrations.RunPython(delete_trajectory_symbols_array, reverse_code=create_trajectory_symbols_array), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0028_remove_node_prefix.py b/aiida/backends/djsite/db/migrations/0028_remove_node_prefix.py deleted file mode 100644 index e6484ebf72..0000000000 --- a/aiida/backends/djsite/db/migrations/0028_remove_node_prefix.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
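
Migrations 0026 and 0027 deliberately split the copy and the delete across two transactions, so that an interruption between them can never destroy the only copy of the symbols. The toy round trip below mirrors that idea; numpy is an assumed dependency of this example, not of the diff.

```python
# Toy round trip mirroring the 0026/0027 two-phase pattern -- illustrative only.
import numpy

symbols_array = numpy.array(['Ba', 'Ti', 'O'])

# Step 1 (0026): duplicate into an attribute-friendly plain list.
symbols_attribute = symbols_array.tolist()

# Step 2 (0027): only after step 1 is committed, drop the repository array.
del symbols_array

assert symbols_attribute == ['Ba', 'Ti', 'O']
```
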
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Final data migration for `Nodes` after the `aiida.orm.nodes` reorganization was finalized, removing the `node.` prefix.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.28' -DOWN_REVISION = '1.0.27' - - -class Migration(migrations.Migration): - """All node sub classes now live in `aiida.orm.nodes`, so the `node.` prefix can be removed.""" - - dependencies = [ - ('db', '0027_delete_trajectory_symbols_array'), - ] - - operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^node.data.', 'data.') - WHERE type LIKE 'node.data.%'; - - UPDATE db_dbnode - SET type = regexp_replace(type, '^node.process.', 'process.') - WHERE type LIKE 'node.process.%'; - """, - reverse_sql=r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^data.', 'node.data.') - WHERE type LIKE 'data.%'; - - UPDATE db_dbnode - SET type = regexp_replace(type, '^process.', 'node.process.') - WHERE type LIKE 'process.%'; - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0029_rename_parameter_data_to_dict.py b/aiida/backends/djsite/db/migrations/0029_rename_parameter_data_to_dict.py deleted file mode 100644 index d797569208..0000000000 --- a/aiida/backends/djsite/db/migrations/0029_rename_parameter_data_to_dict.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Data migration after `ParameterData` was renamed to `Dict`.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.29' -DOWN_REVISION = '1.0.28' - - -class Migration(migrations.Migration): - """Data migration after `ParameterData` was renamed to `Dict`.""" - - dependencies = [ - ('db', '0028_remove_node_prefix'), - ] - - operations = [ - migrations.RunSQL( - sql=r"""UPDATE db_dbnode SET type = 'data.dict.Dict.' WHERE type = 'data.parameter.ParameterData.';""", - reverse_sql=r""" - UPDATE db_dbnode SET type = 'data.parameter.ParameterData.'
WHERE type = 'data.dict.Dict.'; - """ - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0030_dbnode_type_to_dbnode_node_type.py b/aiida/backends/djsite/db/migrations/0030_dbnode_type_to_dbnode_node_type.py deleted file mode 100644 index 7b5c76e9b3..0000000000 --- a/aiida/backends/djsite/db/migrations/0030_dbnode_type_to_dbnode_node_type.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Renaming `DbNode.type` to `DbNode.node_type`""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.30' -DOWN_REVISION = '1.0.29' - - -class Migration(migrations.Migration): - """Renaming `DbNode.type` to `DbNode.node_type`""" - - dependencies = [ - ('db', '0029_rename_parameter_data_to_dict'), - ] - - operations = [ - migrations.RenameField( - model_name='dbnode', - old_name='type', - new_name='node_type', - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0031_remove_dbcomputer_enabled.py b/aiida/backends/djsite/db/migrations/0031_remove_dbcomputer_enabled.py deleted file mode 100644 index 597d3b4e09..0000000000 --- a/aiida/backends/djsite/db/migrations/0031_remove_dbcomputer_enabled.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Remove `DbComputer.enabled`""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.31' -DOWN_REVISION = '1.0.30' - - -class Migration(migrations.Migration): - """Remove `DbComputer.enabled`""" - - dependencies = [ - ('db', '0030_dbnode_type_to_dbnode_node_type'), - ] - - operations = [ - migrations.RemoveField( - model_name='dbcomputer', - name='enabled', - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py b/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py deleted file mode 100644 index 4ee6cd2347..0000000000 --- a/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. 
# -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Remove legacy workflow.""" - -import sys - -import click -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.core import serializers -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.cmdline.utils import echo -from aiida.common import json -from aiida.manage import configuration - -REVISION = '1.0.32' -DOWN_REVISION = '1.0.31' - - -def export_workflow_data(apps, _): - """Export existing legacy workflow data to a JSON file.""" - from tempfile import NamedTemporaryFile - - DbWorkflow = apps.get_model('db', 'DbWorkflow') - DbWorkflowData = apps.get_model('db', 'DbWorkflowData') - DbWorkflowStep = apps.get_model('db', 'DbWorkflowStep') - - count_workflow = DbWorkflow.objects.count() - count_workflow_data = DbWorkflowData.objects.count() - count_workflow_step = DbWorkflowStep.objects.count() - - # Nothing to do if all tables are empty - if count_workflow == 0 and count_workflow_data == 0 and count_workflow_step == 0: - return - - if not configuration.PROFILE.is_test_profile: - echo.echo('\n') - echo.echo_warning('The legacy workflow tables contain data but will have to be dropped to continue.') - echo.echo_warning('If you continue, the content will be dumped to a JSON file before dropping the tables.') - echo.echo_warning('This serves merely as a reference and cannot be used to restore the database.') - echo.echo_warning('If you want a proper backup, make sure to dump the full database and back up your repository.') - if not click.confirm('Are you sure you want to continue', default=True): - sys.exit(1) - - delete_on_close = configuration.PROFILE.is_test_profile - - data = { - 'workflow': serializers.serialize('json', DbWorkflow.objects.all()), - 'workflow_data': serializers.serialize('json', DbWorkflowData.objects.all()), - 'workflow_step': serializers.serialize('json', DbWorkflowStep.objects.all()), - } - - with NamedTemporaryFile( - prefix='legacy-workflows', suffix='.json', dir='.', delete=delete_on_close, mode='wb' - ) as handle: - filename = handle.name - json.dump(data, handle) - - # If delete_on_close is False, we are running for the user and add an additional message with the file location - if not delete_on_close: - echo.echo_report(f'Exported workflow data to {filename}') - - -class Migration(migrations.Migration): - """Remove legacy workflow.""" - - dependencies = [ - ('db', '0031_remove_dbcomputer_enabled'), - ] - - operations = [ - # Export existing data to a JSON file - migrations.RunPython(export_workflow_data, reverse_code=migrations.RunPython.noop), - migrations.RemoveField( - model_name='dbworkflow', - name='user', - ), - migrations.AlterUniqueTogether( - name='dbworkflowdata', - unique_together=set([]), - ), - migrations.RemoveField( - model_name='dbworkflowdata', - name='aiida_obj', - ), - migrations.RemoveField( - model_name='dbworkflowdata', - name='parent', - ), - migrations.AlterUniqueTogether( - name='dbworkflowstep', - unique_together=set([]), - ), - migrations.RemoveField( - model_name='dbworkflowstep', - name='calculations', - ), - migrations.RemoveField( -
model_name='dbworkflowstep', - name='parent', - ), - migrations.RemoveField( - model_name='dbworkflowstep', - name='sub_workflows', - ), - migrations.RemoveField( - model_name='dbworkflowstep', - name='user', - ), - migrations.DeleteModel(name='DbWorkflow',), - migrations.DeleteModel(name='DbWorkflowData',), - migrations.DeleteModel(name='DbWorkflowStep',), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0033_replace_text_field_with_json_field.py b/aiida/backends/djsite/db/migrations/0033_replace_text_field_with_json_field.py deleted file mode 100644 index 63dab67797..0000000000 --- a/aiida/backends/djsite/db/migrations/0033_replace_text_field_with_json_field.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Replace use of text fields to store JSON data with builtin JSONField.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error,no-member -import django.contrib.postgres.fields.jsonb -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.common.json import JSONEncoder - -REVISION = '1.0.33' -DOWN_REVISION = '1.0.32' - - -class Migration(migrations.Migration): - """Replace use of text fields to store JSON data with builtin JSONField.""" - - dependencies = [ - ('db', '0032_remove_legacy_workflows'), - ] - - operations = [ - migrations.AlterField( - model_name='dbauthinfo', - name='auth_params', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=JSONEncoder), - ), - migrations.AlterField( - model_name='dbauthinfo', - name='metadata', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=JSONEncoder), - ), - migrations.AlterField( - model_name='dbcomputer', - name='metadata', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=JSONEncoder), - ), - migrations.AlterField( - model_name='dbcomputer', - name='transport_params', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=JSONEncoder), - ), - migrations.AlterField( - model_name='dblog', - name='metadata', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=JSONEncoder), - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0034_drop_node_columns_nodeversion_public.py b/aiida/backends/djsite/db/migrations/0034_drop_node_columns_nodeversion_public.py deleted file mode 100644 index 1edfb67a3d..0000000000 --- a/aiida/backends/djsite/db/migrations/0034_drop_node_columns_nodeversion_public.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Drop the columns `nodeversion` and `public` from the `DbNode` model.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error,no-member -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.34' -DOWN_REVISION = '1.0.33' - - -class Migration(migrations.Migration): - """Drop the columns `nodeversion` and `public` from the `DbNode` model.""" - - dependencies = [ - ('db', '0033_replace_text_field_with_json_field'), - ] - - operations = [ - migrations.RemoveField( - model_name='dbnode', - name='nodeversion', - ), - migrations.RemoveField( - model_name='dbnode', - name='public', - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0035_simplify_user_model.py b/aiida/backends/djsite/db/migrations/0035_simplify_user_model.py deleted file mode 100644 index 0cb38d2fef..0000000000 --- a/aiida/backends/djsite/db/migrations/0035_simplify_user_model.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Simplify the `DbUser` model.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error,no-member -from django.db import migrations, models - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.35' -DOWN_REVISION = '1.0.34' - - -class Migration(migrations.Migration): - """Simplify the `DbUser` model by dropping unused columns.""" - - dependencies = [ - ('db', '0034_drop_node_columns_nodeversion_public'), - ] - - operations = [ - migrations.AlterField( - model_name='dbuser', - name='password', - field=models.CharField(max_length=128, default='pass', verbose_name='password'), - ), - migrations.RemoveField( - model_name='dbuser', - name='password', - ), - migrations.RemoveField( - model_name='dbuser', - name='date_joined', - ), - migrations.RemoveField( - model_name='dbuser', - name='groups', - ), - migrations.RemoveField( - model_name='dbuser', - name='is_active', - ), - migrations.RemoveField( - model_name='dbuser', - name='is_staff', - ), - migrations.AlterField( - model_name='dbuser', - name='is_superuser', - field=models.BooleanField(default=False, blank=True), - ), - migrations.RemoveField( - model_name='dbuser', - name='is_superuser', - ), - migrations.RemoveField( - model_name='dbuser', - name='last_login', - ), - migrations.RemoveField( - model_name='dbuser', - name='user_permissions', - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0036_drop_computer_transport_params.py 
b/aiida/backends/djsite/db/migrations/0036_drop_computer_transport_params.py deleted file mode 100644 index cad2aa3081..0000000000 --- a/aiida/backends/djsite/db/migrations/0036_drop_computer_transport_params.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Drop the `transport_params` from the `Computer` database model.""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error,no-member -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.36' -DOWN_REVISION = '1.0.35' - - -class Migration(migrations.Migration): - """Drop the `transport_params` from the `Computer` database model.""" - - dependencies = [ - ('db', '0035_simplify_user_model'), - ] - - operations = [ - migrations.RemoveField( - model_name='dbcomputer', - name='transport_params', - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py deleted file mode 100644 index 51bf30c1cb..0000000000 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name,import-error,no-name-in-module,no-member -"""Adding JSONB fields for Node.attributes and Node.extras""" - -import math - -import click -import django.contrib.postgres.fields.jsonb -from django.db import migrations, models, transaction - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.cmdline.utils import echo -from aiida.common.json import JSONEncoder -from aiida.common.timezone import datetime_to_isoformat - -REVISION = '1.0.37' -DOWN_REVISION = '1.0.36' - -# Nodes are processed in groups of the following size -group_size = 1000 - - -def lazy_bulk_fetch(max_obj, max_count, fetch_func, start=0): - counter = start - while counter < max_count: - yield fetch_func()[counter:counter + max_obj] - counter += max_obj - - -def transition_attributes_extras(apps, _): - """ Migrate the DbAttribute & the DbExtras tables into the attributes and extras columns of DbNode.
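Nodes are fetched lazily in batches of ``group_size`` (1000) via ``lazy_bulk_fetch``, so memory use stays bounded even for large databases, and the whole transition runs inside a single atomic transaction.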
""" - db_node_model = apps.get_model('db', 'DbNode') - - with transaction.atomic(): - total_node_no = db_node_model.objects.count() - - if total_node_no == 0: - return - - with click.progressbar(label='Updating attributes and extras', length=total_node_no, show_pos=True) as pr_bar: - fetcher = lazy_bulk_fetch(group_size, total_node_no, db_node_model.objects.order_by('id').all) - error = False - - for batch in fetcher: - for curr_dbnode in batch: - - # Migrating attributes - dbattrs = list(curr_dbnode.dbattributes.all()) - attrs, err_ = attributes_to_dict(sorted(dbattrs, key=lambda a: a.key)) - error |= err_ - curr_dbnode.attributes = attrs - - # Migrating extras - dbextr = list(curr_dbnode.dbextras.all()) - extr, err_ = attributes_to_dict(sorted(dbextr, key=lambda a: a.key)) - error |= err_ - curr_dbnode.extras = extr - - # Saving the result - curr_dbnode.save() - pr_bar.update(1) - - if error: - raise Exception('There has been some errors during the migration') - - -def transition_settings(apps, _): - """ Migrate the DbSetting EAV val into the JSONB val column of the same table. """ - db_setting_model = apps.get_model('db', 'DbSetting') - - with transaction.atomic(): - total_settings_no = db_setting_model.objects.count() - - if total_settings_no == 0: - return - - with click.progressbar(label='Updating settings', length=total_settings_no, show_pos=True) as pr_bar: - fetcher = lazy_bulk_fetch(group_size, total_settings_no, db_setting_model.objects.order_by('id').all) - error = False - - for batch in fetcher: - for curr_dbsetting in batch: - - # Migrating dbsetting.val - dt = curr_dbsetting.datatype - val = None - if dt == 'txt': - val = curr_dbsetting.tval - elif dt == 'float': - val = curr_dbsetting.fval - if math.isnan(val) or math.isinf(val): - val = str(val) - elif dt == 'int': - val = curr_dbsetting.ival - elif dt == 'bool': - val = curr_dbsetting.bval - elif dt == 'date': - val = datetime_to_isoformat(curr_dbsetting.dval) - - curr_dbsetting.val = val - - # Saving the result - curr_dbsetting.save() - pr_bar.update(1) - - if error: - raise Exception('There has been some errors during the migration') - - -def attributes_to_dict(attr_list): - """ - Transform the attributes of a node into a dictionary. It assumes the key - are ordered alphabetically, and that they all belong to the same node. - """ - d = {} - - error = False - for a in attr_list: - try: - tmp_d = select_from_key(a.key, d) - except ValueError: - echo.echo_error(f"Couldn't transfer attribute {a.id} with key {a.key} for dbnode {a.dbnode_id}") - error = True - continue - key = a.key.split('.')[-1] - - if isinstance(tmp_d, (list, tuple)): - key = int(key) - - dt = a.datatype - - if dt == 'dict': - tmp_d[key] = {} - elif dt == 'list': - tmp_d[key] = [None] * a.ival - else: - val = None - if dt == 'txt': - val = a.tval - elif dt == 'float': - val = a.fval - if math.isnan(val) or math.isinf(val): - val = str(val) - elif dt == 'int': - val = a.ival - elif dt == 'bool': - val = a.bval - elif dt == 'date': - val = datetime_to_isoformat(a.dval) - - tmp_d[key] = val - - return d, error - - -def select_from_key(key, d): - """ - Return element of the dict to do the insertion on. If it is foo.1.bar, it - will return d["foo"][1]. If it is only foo, it will return d directly. 
- """ - path = key.split('.')[:-1] - - tmp_d = d - for p in path: - if isinstance(tmp_d, (list, tuple)): - tmp_d = tmp_d[int(p)] - else: - tmp_d = tmp_d[p] - - return tmp_d - - -class Migration(migrations.Migration): - """ - This migration changes Django backend to support the JSONB fields. - It is a schema migration that removes the DbAttribute and DbExtra - tables and their reference to the DbNode tables and adds the - corresponding JSONB columns to the DbNode table. - It is also a data migration that transforms and adds the data of - the DbAttribute and DbExtra tables to the JSONB columns to the - DbNode table. - """ - - dependencies = [ - ('db', '0036_drop_computer_transport_params'), - ] - - operations = [ - # ############################################ - # Migration of the Attribute and Extras tables - # ############################################ - - # Create the DbNode.attributes JSONB and DbNode.extras JSONB fields - migrations.AddField( - model_name='dbnode', - name='attributes', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True, encoder=JSONEncoder), - ), - migrations.AddField( - model_name='dbnode', - name='extras', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True, encoder=JSONEncoder), - ), - # Migrate the data from the DbAttribute table to the JSONB field - migrations.RunPython(transition_attributes_extras, reverse_code=migrations.RunPython.noop), - migrations.AlterUniqueTogether( - name='dbattribute', - unique_together=set([]), - ), - # Delete the DbAttribute table - migrations.DeleteModel(name='DbAttribute',), - migrations.AlterUniqueTogether( - name='dbextra', - unique_together=set([]), - ), - # Delete the DbExtra table - migrations.DeleteModel(name='DbExtra',), - - # ############################### - # Migration of the Settings table - - # ############################### - # Create the DbSetting.val JSONB field - migrations.AddField( - model_name='dbsetting', - name='val', - field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True, encoder=JSONEncoder), - ), - # Migrate the data from the DbSetting EAV to the JSONB val field - migrations.RunPython(transition_settings, reverse_code=migrations.RunPython.noop), - - # Delete the tval, fval, ival, bval, dval - migrations.RemoveField( - model_name='dbsetting', - name='tval', - ), - migrations.RemoveField( - model_name='dbsetting', - name='fval', - ), - migrations.RemoveField( - model_name='dbsetting', - name='ival', - ), - migrations.RemoveField( - model_name='dbsetting', - name='bval', - ), - migrations.RemoveField( - model_name='dbsetting', - name='dval', - ), - migrations.RemoveField( - model_name='dbsetting', - name='datatype', - ), - migrations.AlterField( - model_name='dbsetting', - name='key', - field=models.TextField(), - ), - migrations.AlterUniqueTogether( - name='dbsetting', - unique_together=set([]), - ), - migrations.AlterField( - model_name='dbsetting', - name='key', - field=models.CharField(max_length=1024, db_index=True, unique=True), - ), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py b/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py deleted file mode 100644 index bc515fb7f1..0000000000 --- a/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 -*- 
-########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Data migration for legacy `JobCalculations`. - -These old nodes have already been migrated to the correct `CalcJobNode` type in a previous migration, but they can -still contain a `state` attribute with a deprecated `JobCalcState` value and they are missing a value for the -`process_state`, `process_status`, `process_label` and `exit_status`. The `process_label` is impossible to infer -consistently in SQL so it will be omitted. The others will be mapped from the `state` attribute as follows: - -.. code-block:: text - - Old state            | Process state  | Exit status | Process status - ---------------------|----------------|-------------|---------------------------------------------------------- - `NEW`                | `killed`       | `None`      | Legacy `JobCalculation` with state `NEW` - `TOSUBMIT`           | `killed`       | `None`      | Legacy `JobCalculation` with state `TOSUBMIT` - `SUBMITTING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `SUBMITTING` - `WITHSCHEDULER`      | `killed`       | `None`      | Legacy `JobCalculation` with state `WITHSCHEDULER` - `COMPUTED`           | `killed`       | `None`      | Legacy `JobCalculation` with state `COMPUTED` - `RETRIEVING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `RETRIEVING` - `PARSING`            | `killed`       | `None`      | Legacy `JobCalculation` with state `PARSING` - `SUBMISSIONFAILED`   | `excepted`     | `None`      | Legacy `JobCalculation` with state `SUBMISSIONFAILED` - `RETRIEVALFAILED`    | `excepted`     | `None`      | Legacy `JobCalculation` with state `RETRIEVALFAILED` - `PARSINGFAILED`      | `excepted`     | `None`      | Legacy `JobCalculation` with state `PARSINGFAILED` - `FAILED`             | `finished`     | 2           | - - `FINISHED`           | `finished`     | 0           | - - `IMPORTED`           | -              | -           | - - -Note the `IMPORTED` state was never actually stored in the `state` attribute, so we do not have to consider it. -The old `state` attribute has to be removed after the data is migrated, because its value is no longer valid or useful. - -Note: in addition to the three attributes mentioned in the table, all matched nodes will get `Legacy JobCalculation` as -their `process_label` which is one of the default columns of `verdi process list`. -""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.38' -DOWN_REVISION = '1.0.37' - - -class Migration(migrations.Migration): - """Data migration for legacy `JobCalculations`.""" - - dependencies = [ - ('db', '0037_attributes_extras_settings_json'), - ] - - # Note that the condition on matching target nodes is done only on the `node_type` and the `state` attribute value. - # New `CalcJobs` will have the same node type and, while active, can have a `state` attribute with a value - # of the enum `CalcJobState`, some of which match the deprecated `JobCalcState`; however, the new ones are stored - # in lower case, so we do not run the risk of matching them by accident.
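# As an illustrative sketch only (not part of the migration itself), every statement below follows the same jsonb
# pattern:
#
#     UPDATE db_dbnode
#     SET attributes = attributes - 'state' || '{"process_state": "...", "process_status": "...", "process_label": "Legacy JobCalculation"}'
#     WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "..."}';
#
# Here `- 'state'` drops the old `state` key, `||` merges in the new process attributes, and `@>` tests jsonb
# containment, so only nodes whose `state` attribute equals the quoted value are matched.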
- operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `NEW`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "NEW"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `TOSUBMIT`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "TOSUBMIT"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `SUBMITTING`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "SUBMITTING"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `WITHSCHEDULER`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "WITHSCHEDULER"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `COMPUTED`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "COMPUTED"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `RETRIEVING`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVING"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `PARSING`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSING"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `SUBMISSIONFAILED`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "SUBMISSIONFAILED"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `RETRIEVALFAILED`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVALFAILED"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `PARSINGFAILED`", "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSINGFAILED"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 2, "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' 
AND attributes @> '{"state": "FAILED"}'; - UPDATE db_dbnode - SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 0, "process_label": "Legacy JobCalculation"}' - WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "FINISHED"}'; - """, - reverse_sql='' - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0039_reset_hash.py b/aiida/backends/djsite/db/migrations/0039_reset_hash.py deleted file mode 100644 index e5c1276c77..0000000000 --- a/aiida/backends/djsite/db/migrations/0039_reset_hash.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -""" -Invalidating node hash - User should rehash nodes for caching -""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.cmdline.utils import echo - -REVISION = '1.0.39' -DOWN_REVISION = '1.0.38' - -# Currently valid hash key -_HASH_EXTRA_KEY = '_aiida_hash' - - -def notify_user(apps, schema_editor): # pylint: disable=unused-argument - DbNode = apps.get_model('db', 'DbNode') - if DbNode.objects.count(): - echo.echo_warning('Invalidating the hashes of all nodes. Please run "verdi rehash".', bold=True) - - -class Migration(migrations.Migration): - """Invalidating node hash - User should rehash nodes for caching""" - - dependencies = [ - ('db', '0038_data_migration_legacy_job_calculations'), - ] - - operations = [ - migrations.RunPython(notify_user, reverse_code=notify_user), - migrations.RunSQL( - f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];", - reverse_sql=f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];" - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0040_data_migration_legacy_process_attributes.py b/aiida/backends/djsite/db/migrations/0040_data_migration_legacy_process_attributes.py deleted file mode 100644 index 08d0f8ef84..0000000000 --- a/aiida/backends/djsite/db/migrations/0040_data_migration_legacy_process_attributes.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Data migration for some legacy process attributes. 
- -Attribute keys that are renamed: - - * `_sealed` -> `sealed` - -Attribute keys that are removed entirely: - - * `_finished` - * `_failed` - * `_aborted` - * `_do_abort` - -Finally, after these first migrations, any remaining process nodes that still do not have a sealed attribute have -it set to `True`, excluding nodes whose `process_state` attribute is one of the active states `created`, `running` -or `waiting`, because those are valid active processes that are not yet sealed. - -""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.40' -DOWN_REVISION = '1.0.39' - - -class Migration(migrations.Migration): - """Data migration for legacy process attributes.""" - - dependencies = [ - ('db', '0039_reset_hash'), - ] - - operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dbnode - SET attributes = jsonb_set(attributes, '{"sealed"}', attributes->'_sealed') - WHERE attributes ? '_sealed' AND node_type LIKE 'process.%'; - -- Copy `_sealed` -> `sealed` - - UPDATE db_dbnode SET attributes = attributes - '_sealed' - WHERE attributes ? '_sealed' AND node_type LIKE 'process.%'; - -- Delete `_sealed` - - UPDATE db_dbnode SET attributes = attributes - '_finished' - WHERE attributes ? '_finished' AND node_type LIKE 'process.%'; - -- Delete `_finished` - - UPDATE db_dbnode SET attributes = attributes - '_failed' - WHERE attributes ? '_failed' AND node_type LIKE 'process.%'; - -- Delete `_failed` - - UPDATE db_dbnode SET attributes = attributes - '_aborted' - WHERE attributes ? '_aborted' AND node_type LIKE 'process.%'; - -- Delete `_aborted` - - UPDATE db_dbnode SET attributes = attributes - '_do_abort' - WHERE attributes ? '_do_abort' AND node_type LIKE 'process.%'; - -- Delete `_do_abort` - - UPDATE db_dbnode - SET attributes = jsonb_set(attributes, '{"sealed"}', to_jsonb(True)) - WHERE - node_type LIKE 'process.%' AND - NOT (attributes ? 'sealed') AND - attributes->>'process_state' NOT IN ('created', 'running', 'waiting'); - -- Set `sealed=True` for process nodes that do not yet have a `sealed` attribute AND are not in an active state - """, - reverse_sql='' - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0041_seal_unsealed_processes.py b/aiida/backends/djsite/db/migrations/0041_seal_unsealed_processes.py deleted file mode 100644 index a8f26e883d..0000000000 --- a/aiida/backends/djsite/db/migrations/0041_seal_unsealed_processes.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Seal any process nodes that have not yet been sealed but should. - -This should have been accomplished by the last step in the previous migration, but because the WHERE clause was -incorrect, not all nodes that should have been targeted were included.
The problem is with the statement: - - attributes->>'process_state' NOT IN ('created', 'running', 'waiting') - -The problem here is that this will yield `False` if the attribute `process_state` does not even exist. This will be the -case for legacy calculations like `InlineCalculation` nodes. Their node type was already migrated in `0020` but most of -them will be unsealed. -""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.41' -DOWN_REVISION = '1.0.40' - - -class Migration(migrations.Migration): - """Data migration for legacy process attributes.""" - - dependencies = [ - ('db', '0040_data_migration_legacy_process_attributes'), - ] - - operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dbnode - SET attributes = jsonb_set(attributes, '{"sealed"}', to_jsonb(True)) - WHERE - node_type LIKE 'process.%' AND - NOT attributes ? 'sealed' AND - NOT ( - attributes ? 'process_state' AND - attributes->>'process_state' IN ('created', 'running', 'waiting') - ); - -- Set `sealed=True` for process nodes that do not yet have a `sealed` attribute AND are not in an active state - -- It is important to check that `process_state` exists at all before doing the IN check. - """, - reverse_sql='' - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] diff --git a/aiida/backends/djsite/db/migrations/0045_dbgroup_extras.py b/aiida/backends/djsite/db/migrations/0045_dbgroup_extras.py deleted file mode 100644 index e5d0816eeb..0000000000 --- a/aiida/backends/djsite/db/migrations/0045_dbgroup_extras.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Migration to add the `extras` JSONB column to the `DbGroup` model.""" -# pylint: disable=invalid-name -import django.contrib.postgres.fields.jsonb -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.common.json import JSONEncoder - -REVISION = '1.0.45' -DOWN_REVISION = '1.0.44' - - -class Migration(migrations.Migration): - """Migrate to add the extras column to the dbgroup table.""" - dependencies = [ - ('db', '0044_dbgroup_type_string'), - ] - - operations = [ - migrations.AddField( - model_name='dbgroup', - name='extras', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=False, encoder=JSONEncoder), - ), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/0046_add_node_repository_metadata.py b/aiida/backends/djsite/db/migrations/0046_add_node_repository_metadata.py deleted file mode 100644 index 7edcf1902f..0000000000 --- a/aiida/backends/djsite/db/migrations/0046_add_node_repository_metadata.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name,too-few-public-methods -"""Migration to add the `repository_metadata` JSONB column.""" - -# pylint: disable=no-name-in-module,import-error -import django.contrib.postgres.fields.jsonb -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.common.json import JSONEncoder - -REVISION = '1.0.46' -DOWN_REVISION = '1.0.45' - - -class Migration(migrations.Migration): - """Migration to add the `repository_metadata` JSONB column.""" - - dependencies = [ - ('db', '0045_dbgroup_extras'), - ] - - operations = [ - migrations.AddField( - model_name='dbnode', - name='repository_metadata', - field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True, encoder=JSONEncoder), - ), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/0047_migrate_repository.py b/aiida/backends/djsite/db/migrations/0047_migrate_repository.py deleted file mode 100644 index 32c5dbd7ee..0000000000 --- a/aiida/backends/djsite/db/migrations/0047_migrate_repository.py +++ /dev/null @@ -1,160 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name,too-few-public-methods,no-name-in-module,import-error -"""Migrate the file repository to the new disk object store based implementation.""" -import pathlib - -from django.core.exceptions import ObjectDoesNotExist -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.backends.general.migrations import utils -from aiida.cmdline.utils import echo - -REVISION = '1.0.47' -DOWN_REVISION = '1.0.46' - -REPOSITORY_UUID_KEY = 'repository|uuid' - - -def migrate_repository(apps, schema_editor): - """Migrate the repository.""" - # pylint: disable=too-many-locals,too-many-branches,too-many-statements - import json - from tempfile import NamedTemporaryFile - - from disk_objectstore import Container - - from aiida.common import exceptions - from aiida.common.progress_reporter import get_progress_reporter, set_progress_bar_tqdm, set_progress_reporter - from aiida.manage.configuration import get_profile - from aiida.manage.manager import get_manager - - DbNode = apps.get_model('db', 'DbNode') - - profile = get_profile() - backend = get_manager().get_backend() - node_count = DbNode.objects.count() - missing_node_uuids = [] - missing_repo_folder = [] - shard_count = 256 - - basepath = pathlib.Path(profile.repository_path) / 'repository' / 'node' - filepath = pathlib.Path(profile.repository_path) / 'container' - container = Container(filepath) - - if not profile.is_test_profile and (node_count > 0 and not basepath.is_dir()): - raise exceptions.DatabaseMigrationError( - f'the file repository `{basepath}` does not exist but the 
database is not empty; it contains {node_count} ' - 'nodes. Aborting the migration.' - ) - - if not profile.is_test_profile and container.is_initialised: - raise exceptions.DatabaseMigrationError( - f'the container {filepath} already exists. If you ran this migration before and it failed, simply ' - 'delete this directory and restart the migration.' - ) - - container.init_container(clear=True, **profile.defaults['repository']) - - # Only show the progress bar if there is at least one node in the database. Note that we cannot simply make the entire - # next block under the context manager optional, since it performs checks on whether the repository contains files - # that are not in the database, which are still important to perform even if the database is empty. - if node_count > 0: - set_progress_bar_tqdm() - else: - set_progress_reporter(None) - - with get_progress_reporter()(total=shard_count, desc='Migrating file repository') as progress: - for i in range(shard_count): - - shard = '%.2x' % i  # noqa flynt - progress.set_description_str(f'Migrating file repository: shard {shard}') - - mapping_node_repository_metadata, missing_sub_repo_folder = utils.migrate_legacy_repository(shard) - - if missing_sub_repo_folder: - missing_repo_folder.extend(missing_sub_repo_folder) - del missing_sub_repo_folder - - if mapping_node_repository_metadata is None: - continue - - for node_uuid, repository_metadata in mapping_node_repository_metadata.items(): - - # If `repository_metadata` is `{}` or `None`, we skip it, as we can leave the column default `null`. - if not repository_metadata: - continue - - try: - # This can happen if the node was deleted but the repo folder wasn't, or the repo folder just never - # corresponded to an actual node. In any case, we don't want to fail but just log the warning. - node = DbNode.objects.get(uuid=node_uuid) - except ObjectDoesNotExist: - missing_node_uuids.append((node_uuid, repository_metadata)) - else: - node.repository_metadata = repository_metadata - node.save() - - del mapping_node_repository_metadata - progress.update() - - # Store the UUID of the repository container in the `DbSetting` table. Note that for new databases, the profile - # setup will already have stored the UUID and so it should be skipped, or an exception for a duplicate key will be - # raised. This migration step is only necessary for existing databases that are migrated. - container_id = backend.get_repository().uuid - with schema_editor.connection.cursor() as cursor: - cursor.execute( - f""" - INSERT INTO db_dbsetting (key, val, description, time) - VALUES ('repository|uuid', to_json('{container_id}'::text), 'Repository UUID', current_timestamp) - ON CONFLICT (key) DO NOTHING; - """ - ) - - if not profile.is_test_profile: - - if missing_node_uuids: - prefix = 'migration-repository-missing-nodes-' - with NamedTemporaryFile(prefix=prefix, suffix='.json', dir='.', mode='w+', delete=False) as handle: - json.dump(missing_node_uuids, handle) - echo.echo_warning( - '\nDetected node repository folders for nodes that do not exist in the database. The UUIDs of ' - f'those nodes have been written to a log file: {handle.name}' - ) - - if missing_repo_folder: - prefix = 'migration-repository-missing-subfolder-' - with NamedTemporaryFile(prefix=prefix, suffix='.json', dir='.', mode='w+', delete=False) as handle: - json.dump(missing_repo_folder, handle) - echo.echo_warning( - '\nDetected repository folders that were missing the required subfolder `path` or `raw_input`.'
- f' The paths of those nodes\' repository folders have been written to a log file: {handle.name}' - ) - - # If there were no nodes, most likely a new profile, there is no need to print the warning - if node_count: - echo.echo_warning( - '\nMigrated file repository to the new disk object store. The old repository has not been deleted ' - f'as a safety measure and can be found at {pathlib.Path(profile.repository_path, "repository")}.' - ) - - -class Migration(migrations.Migration): - """Migrate the file repository to the new disk object store based implementation.""" - - dependencies = [ - ('db', '0046_add_node_repository_metadata'), - ] - - operations = [ - migrations.RunPython(migrate_repository, reverse_code=migrations.RunPython.noop), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/0048_computer_name_to_label.py b/aiida/backends/djsite/db/migrations/0048_computer_name_to_label.py deleted file mode 100644 index 2a65aa3a91..0000000000 --- a/aiida/backends/djsite/db/migrations/0048_computer_name_to_label.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Rename the ``name`` column of the ``Computer`` entity to ``label``.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.48' -DOWN_REVISION = '1.0.47' - - -class Migration(migrations.Migration): - """Rename the ``name`` column of the ``Computer`` entity to ``label``.""" - - dependencies = [ - ('db', '0047_migrate_repository'), - ] - - operations = [ - migrations.RenameField( - model_name='dbcomputer', - old_name='name', - new_name='label', - ), - migrations.RunSQL( - 'ALTER INDEX db_dbcomputer_name_key rename TO db_dbcomputer_label_bc480bab_uniq', - 'ALTER INDEX db_dbcomputer_label_bc480bab_uniq rename TO db_dbcomputer_name_key', - ), - migrations.RunSQL( - 'ALTER INDEX db_dbcomputer_name_f1800b1a_like rename TO db_dbcomputer_label_bc480bab_like', - 'ALTER INDEX db_dbcomputer_label_bc480bab_like rename TO db_dbcomputer_name_f1800b1a_like', - ), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/0049_entry_point_core_prefix.py b/aiida/backends/djsite/db/migrations/0049_entry_point_core_prefix.py deleted file mode 100644 index 6e39957065..0000000000 --- a/aiida/backends/djsite/db/migrations/0049_entry_point_core_prefix.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code.
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name,line-too-long -"""Update node types after `core.` prefix was added to entry point names.""" -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version - -REVISION = '1.0.49' -DOWN_REVISION = '1.0.48' - -forward_sql = """ - UPDATE db_dbnode SET node_type = 'data.core.array.ArrayData.' WHERE node_type = 'data.array.ArrayData.'; - UPDATE db_dbnode SET node_type = 'data.core.array.bands.BandsData.' WHERE node_type = 'data.array.bands.BandsData.'; - UPDATE db_dbnode SET node_type = 'data.core.array.kpoints.KpointsData.' WHERE node_type = 'data.array.kpoints.KpointsData.'; - UPDATE db_dbnode SET node_type = 'data.core.array.projection.ProjectionData.' WHERE node_type = 'data.array.projection.ProjectionData.'; - UPDATE db_dbnode SET node_type = 'data.core.array.trajectory.TrajectoryData.' WHERE node_type = 'data.array.trajectory.TrajectoryData.'; - UPDATE db_dbnode SET node_type = 'data.core.array.xy.XyData.' WHERE node_type = 'data.array.xy.XyData.'; - UPDATE db_dbnode SET node_type = 'data.core.base.BaseData.' WHERE node_type = 'data.base.BaseData.'; - UPDATE db_dbnode SET node_type = 'data.core.bool.Bool.' WHERE node_type = 'data.bool.Bool.'; - UPDATE db_dbnode SET node_type = 'data.core.cif.CifData.' WHERE node_type = 'data.cif.CifData.'; - UPDATE db_dbnode SET node_type = 'data.core.code.Code.' WHERE node_type = 'data.code.Code.'; - UPDATE db_dbnode SET node_type = 'data.core.dict.Dict.' WHERE node_type = 'data.dict.Dict.'; - UPDATE db_dbnode SET node_type = 'data.core.float.Float.' WHERE node_type = 'data.float.Float.'; - UPDATE db_dbnode SET node_type = 'data.core.folder.FolderData.' WHERE node_type = 'data.folder.FolderData.'; - UPDATE db_dbnode SET node_type = 'data.core.int.Int.' WHERE node_type = 'data.int.Int.'; - UPDATE db_dbnode SET node_type = 'data.core.list.List.' WHERE node_type = 'data.list.List.'; - UPDATE db_dbnode SET node_type = 'data.core.numeric.NumericData.' WHERE node_type = 'data.numeric.NumericData.'; - UPDATE db_dbnode SET node_type = 'data.core.orbital.OrbitalData.' WHERE node_type = 'data.orbital.OrbitalData.'; - UPDATE db_dbnode SET node_type = 'data.core.remote.RemoteData.' WHERE node_type = 'data.remote.RemoteData.'; - UPDATE db_dbnode SET node_type = 'data.core.remote.stash.RemoteStashData.' WHERE node_type = 'data.remote.stash.RemoteStashData.'; - UPDATE db_dbnode SET node_type = 'data.core.remote.stash.folder.RemoteStashFolderData.' WHERE node_type = 'data.remote.stash.folder.RemoteStashFolderData.'; - UPDATE db_dbnode SET node_type = 'data.core.singlefile.SinglefileData.' WHERE node_type = 'data.singlefile.SinglefileData.'; - UPDATE db_dbnode SET node_type = 'data.core.str.Str.' WHERE node_type = 'data.str.Str.'; - UPDATE db_dbnode SET node_type = 'data.core.structure.StructureData.' WHERE node_type = 'data.structure.StructureData.'; - UPDATE db_dbnode SET node_type = 'data.core.upf.UpfData.' 
WHERE node_type = 'data.upf.UpfData.'; - UPDATE db_dbcomputer SET scheduler_type = 'core.direct' WHERE scheduler_type = 'direct'; - UPDATE db_dbcomputer SET scheduler_type = 'core.lsf' WHERE scheduler_type = 'lsf'; - UPDATE db_dbcomputer SET scheduler_type = 'core.pbspro' WHERE scheduler_type = 'pbspro'; - UPDATE db_dbcomputer SET scheduler_type = 'core.sge' WHERE scheduler_type = 'sge'; - UPDATE db_dbcomputer SET scheduler_type = 'core.slurm' WHERE scheduler_type = 'slurm'; - UPDATE db_dbcomputer SET scheduler_type = 'core.torque' WHERE scheduler_type = 'torque'; - UPDATE db_dbcomputer SET transport_type = 'core.local' WHERE transport_type = 'local'; - UPDATE db_dbcomputer SET transport_type = 'core.ssh' WHERE transport_type = 'ssh'; - UPDATE db_dbnode SET process_type = 'aiida.calculations:core.arithmetic.add' WHERE process_type = 'aiida.calculations:arithmetic.add'; - UPDATE db_dbnode SET process_type = 'aiida.calculations:core.templatereplacer' WHERE process_type = 'aiida.calculations:templatereplacer'; - UPDATE db_dbnode SET process_type = 'aiida.workflows:core.arithmetic.add_multiply' WHERE process_type = 'aiida.workflows:arithmetic.add_multiply'; - UPDATE db_dbnode SET process_type = 'aiida.workflows:core.arithmetic.multiply_add' WHERE process_type = 'aiida.workflows:arithmetic.multiply_add'; - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"parser_name"}', '"core.arithmetic.add"') WHERE attributes->>'parser_name' = 'arithmetic.add'; - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"parser_name"}', '"core.templatereplacer.doubler"') WHERE attributes->>'parser_name' = 'templatereplacer.doubler'; - """ - - -class Migration(migrations.Migration): - """Update node types after `core.` prefix was added to entry point names.""" - - dependencies = [ - ('db', '0048_computer_name_to_label'), - ] - - operations = [ - migrations.RunSQL(sql=forward_sql, reverse_sql=''), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] diff --git a/aiida/backends/djsite/db/migrations/__init__.py b/aiida/backends/djsite/db/migrations/__init__.py deleted file mode 100644 index 2dc83ae697..0000000000 --- a/aiida/backends/djsite/db/migrations/__init__.py +++ /dev/null @@ -1,805 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module
-"""Module that contains the db migrations."""
-from django.core.exceptions import ObjectDoesNotExist
-
-from aiida.backends.manager import (
-    SCHEMA_GENERATION_DESCRIPTION,
-    SCHEMA_GENERATION_KEY,
-    SCHEMA_VERSION_DESCRIPTION,
-    SCHEMA_VERSION_KEY,
-)
-from aiida.common.exceptions import AiidaException, DbContentError
-from aiida.manage.configuration import get_config_option
-
-
-class DeserializationException(AiidaException):
-    """Raised when the EAV content stored in the database cannot be deserialized."""
-
-
-LATEST_MIGRATION = '0049_entry_point_core_prefix'
-
-
-def _update_schema_version(version, apps, _):
-    """Update the schema version stored in the database.
-
-    The schema update uses the current models (and checks if the value is stored in EAV mode or JSONB)
-    to avoid using the DbSetting schema that may change (as it changed with the migration of the
-    settings table to JSONB)."""
-    db_setting_model = apps.get_model('db', 'DbSetting')
-    result = db_setting_model.objects.filter(key=SCHEMA_VERSION_KEY).first()
-    # If there is no schema record, create one
-    if result is None:
-        result = db_setting_model()
-        result.key = SCHEMA_VERSION_KEY
-        result.description = SCHEMA_VERSION_DESCRIPTION
-
-    # If it stores the values in an EAV format, add the value in the tval field
-    if hasattr(result, 'tval'):
-        result.tval = str(version)
-    # Otherwise add it to the val (JSON) field
-    else:
-        result.val = str(version)
-
-    result.save()
-
-
-def _upgrade_schema_generation(version, apps, _):
-    """Update the schema generation stored in the database.
-
-    The schema update uses the current models (and checks if the value is stored in EAV mode or JSONB)
-    to avoid using the DbSetting schema that may change (as it changed with the migration of the
-    settings table to JSONB)."""
-    db_setting_model = apps.get_model('db', 'DbSetting')
-    result = db_setting_model.objects.filter(key=SCHEMA_GENERATION_KEY).first()
-    # If there is no schema record, create one
-    if result is None:
-        result = db_setting_model()
-        result.key = SCHEMA_GENERATION_KEY
-        result.description = SCHEMA_GENERATION_DESCRIPTION
-
-    result.val = str(version)
-    result.save()
-
-
-def upgrade_schema_version(up_revision, down_revision):
-    """Return a ``RunPython`` operation that updates the schema version stored in the database."""
-    from functools import partial
-
-    from django.db import migrations
-
-    return migrations.RunPython(
-        partial(_update_schema_version, up_revision), reverse_code=partial(_update_schema_version, down_revision)
-    )
-
-
-def current_schema_version():
-    """Return the schema version of the latest migration."""
-    # Have to use this ugly way of importing because the django migration
-    # files start with numbers which are not a valid package name
-    latest_migration = __import__(f'aiida.backends.djsite.db.migrations.{LATEST_MIGRATION}', fromlist=['REVISION'])
-    return latest_migration.REVISION
-
-
-# Here I copied the class method definitions from aiida.backends.djsite.db.models
-# used to set and delete values for nodes.
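Before the rationale for those copied definitions continues below, it may help to see how the schema-version helper above was consumed by the migration files being deleted in this diff. A minimal sketch, mirroring the pattern of the `0047`/`0048` migrations shown earlier (the revision numbers and dependency here are illustrative only):

```python
from django.db import migrations

from aiida.backends.djsite.db.migrations import upgrade_schema_version

REVISION = '1.0.50'  # hypothetical revision numbers, for illustration only
DOWN_REVISION = '1.0.49'


class Migration(migrations.Migration):
    """Illustrative migration that only bumps the schema version stored in the DbSetting table."""

    dependencies = [('db', '0049_entry_point_core_prefix')]

    operations = [
        # Schema or data operations (RunPython/RunSQL) would precede the version bump.
        upgrade_schema_version(REVISION, DOWN_REVISION),
    ]
```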
-# This was done because: -# 1) The DbAttribute object loaded with apps.get_model() does not provide the class methods -# 2) When the django model changes the migration will continue to work -# 3) If we defined in the migration a new class with these methods as an extension of the DbAttribute class, -# django detects a change in the model and creates a new migration - - -def _deserialize_basic_type(mainitem): - """Deserialize the basic python data types.""" - if mainitem['datatype'] == 'none': - return None - if mainitem['datatype'] == 'bool': - return mainitem['bval'] - if mainitem['datatype'] == 'int': - return mainitem['ival'] - if mainitem['datatype'] == 'float': - return mainitem['fval'] - if mainitem['datatype'] == 'txt': - return mainitem['tval'] - raise TypeError( - f"Expected one of the following types: 'none', 'bool', 'int', 'float', 'txt', got {mainitem['datatype']}" - ) - - -def deserialize_list(mainitem, subitems, sep, original_class, original_pk, lesserrors): - """Deserialize a Python list.""" - # pylint: disable=protected-access - # subitems contains all subitems, here I store only those of - # deepness 1, i.e. if I have subitems '0', '1' and '1.c' I - # store only '0' and '1' - - from aiida.common import AIIDA_LOGGER - - firstlevelsubdict = {k: v for k, v in subitems.items() if sep not in k} - - # For checking, I verify the expected values - expected_set = {f'{i:d}' for i in range(mainitem['ival'])} - received_set = set(firstlevelsubdict.keys()) - # If there are more entries than expected, but all expected - # ones are there, I just issue an error but I do not stop. - - if not expected_set.issubset(received_set): - if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' - else: - subspecifier_string = '' - if original_class is None: - sourcestr = 'the data passed' - else: - sourcestr = original_class.__name__ - - raise DeserializationException( - 'Wrong list elements stored in {} for ' - "{}key='{}' ({} vs {})".format(sourcestr, subspecifier_string, mainitem['key'], expected_set, received_set) - ) - if expected_set != received_set: - if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' - else: - subspecifier_string = '' - - sourcestr = 'the data passed' if original_class is None else original_class.__name__ - - msg = ( - 'Wrong list elements stored in {} for ' - "{}key='{}' ({} vs {})".format(sourcestr, subspecifier_string, mainitem['key'], expected_set, received_set) - ) - if lesserrors: - AIIDA_LOGGER.error(msg) - else: - raise DeserializationException(msg) - - # I get the values in memory as a dictionary - tempdict = {} - for firstsubk, firstsubv in firstlevelsubdict.items(): - # I call recursively the same function to get subitems - newsubitems = {k[len(firstsubk) + len(sep):]: v for k, v in subitems.items() if k.startswith(firstsubk + sep)} - tempdict[firstsubk] = _deserialize_attribute( - mainitem=firstsubv, subitems=newsubitems, sep=sep, original_class=original_class, original_pk=original_pk - ) - - # And then I put them in a list - retlist = [tempdict[f'{i:d}'] for i in range(mainitem['ival'])] - return retlist - - -def deserialize_dict(mainitem, subitems, sep, original_class, original_pk, lesserrors): - """Deserialize a Python dictionary.""" - # pylint: disable=protected-access - # subitems contains all subitems, here I 
store only those of - # deepness 1, i.e. if I have subitems '0', '1' and '1.c' I - # store only '0' and '1' - from aiida.common import AIIDA_LOGGER - - firstlevelsubdict = {k: v for k, v in subitems.items() if sep not in k} - - if len(firstlevelsubdict) != mainitem['ival']: - if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' - else: - subspecifier_string = '' - if original_class is None: - sourcestr = 'the data passed' - else: - sourcestr = original_class.__name__ - - msg = ( - 'Wrong dict length stored in {} for ' - "{}key='{}' ({} vs {})".format( - sourcestr, subspecifier_string, mainitem['key'], len(firstlevelsubdict), mainitem['ival'] - ) - ) - if lesserrors: - AIIDA_LOGGER.error(msg) - else: - raise DeserializationException(msg) - - # I get the values in memory as a dictionary - tempdict = {} - for firstsubk, firstsubv in firstlevelsubdict.items(): - # I call recursively the same function to get subitems - newsubitems = {k[len(firstsubk) + len(sep):]: v for k, v in subitems.items() if k.startswith(firstsubk + sep)} - tempdict[firstsubk] = _deserialize_attribute( - mainitem=firstsubv, subitems=newsubitems, sep=sep, original_class=original_class, original_pk=original_pk - ) - - return tempdict - - -def _deserialize_attribute(mainitem, subitems, sep, original_class=None, original_pk=None, lesserrors=False): - """Deserialize a single attribute. - - :param mainitem: the main item (either the attribute itself for base - types (None, string, ...) or the main item for lists and dicts. - Must contain the 'key' key and also the following keys: - datatype, tval, fval, ival, bval, dval. - NOTE that a type check is not performed! tval is expected to be a string, - dval a date, etc. - :param subitems: must be a dictionary of dictionaries. In the top-level dictionary, - the key must be the key of the attribute, stripped of all prefixes - (i.e., if the mainitem has key 'a.b' and we pass subitems - 'a.b.0', 'a.b.1', 'a.b.1.c', their keys must be '0', '1', '1.c'). - It must be None if the value is not iterable (int, str, - float, ...). - It is an empty dictionary if there are no subitems. - :param sep: a string, the separator between subfields (to separate the - name of a dictionary from the keys it contains, for instance) - :param original_class: if these elements come from a specific subclass - of DbMultipleValueAttributeBaseClass, pass here the class (note: the class, - not the instance!). This is used only in case the wrong number of elements - is found in the raw data, to print a more meaningful message (if the class - has a dbnode associated to it) - :param original_pk: if the elements come from a specific subclass - of DbMultipleValueAttributeBaseClass that has a dbnode associated to it, - pass here the PK integer. This is used only in case the wrong number - of elements is found in the raw data, to print a more meaningful message - :param lesserrors: If set to True, in some cases where the content of the - DB is not consistent but data is still recoverable, - it will just log the message rather than raising - an exception (e.g. if the number of elements of a dictionary is different - from the number declared in the ival field). 
-
-    :return: the deserialized value
-    :raise aiida.backends.djsite.db.migrations.DeserializationException: if an error occurs"""
-
-    from aiida.common import json
-    from aiida.common.timezone import get_current_timezone, is_naive, make_aware
-
-    if mainitem['datatype'] in ['none', 'bool', 'int', 'float', 'txt']:
-        if subitems:
-            raise DeserializationException(f"'{mainitem['key']}' is of a base type, but has subitems!")
-        return _deserialize_basic_type(mainitem)
-
-    if mainitem['datatype'] == 'date':
-        if subitems:
-            raise DeserializationException(f"'{mainitem['key']}' is of a base type, but has subitems!")
-        if is_naive(mainitem['dval']):
-            return make_aware(mainitem['dval'], get_current_timezone())
-        return mainitem['dval']
-
-    if mainitem['datatype'] == 'list':
-        return deserialize_list(mainitem, subitems, sep, original_class, original_pk, lesserrors)
-    if mainitem['datatype'] == 'dict':
-        return deserialize_dict(mainitem, subitems, sep, original_class, original_pk, lesserrors)
-    if mainitem['datatype'] == 'json':
-        try:
-            return json.loads(mainitem['tval'])
-        except ValueError as exc:
-            raise DeserializationException('Error in the content of the json field') from exc
-    else:
-        raise DeserializationException(f"The type field '{mainitem['datatype']}' is not recognized")
-
-
-def deserialize_attributes(data, sep, original_class=None, original_pk=None):
-    """
-    Deserialize the attributes from the format internally stored in the DB
-    to the actual format (dictionaries, lists, integers, ...).
-
-    :param data: must be a dictionary of dictionaries. In the top-level dictionary,
-       the key must be the key of the attribute. The value must be a dictionary
-       with the following keys: datatype, tval, fval, ival, bval, dval. Other
-       keys are ignored.
-       NOTE that a type check is not performed! tval is expected to be a string,
-       dval a date, etc.
-    :param sep: a string, the separator between subfields (to separate the
-       name of a dictionary from the keys it contains, for instance)
-    :param original_class: if these elements come from a specific subclass
-       of DbMultipleValueAttributeBaseClass, pass here the class (note: the class,
-       not the instance!). This is used only in case the wrong number of elements
-       is found in the raw data, to print a more meaningful message (if the class
-       has a dbnode associated to it)
-    :param original_pk: if the elements come from a specific subclass
-       of DbMultipleValueAttributeBaseClass that has a dbnode associated to it,
-       pass here the PK integer. This is used only in case the wrong number
-       of elements is found in the raw data, to print a more meaningful message
-
-    :return: a dictionary, where for each entry the corresponding value is
-       returned, deserialized back to lists, dictionaries, etc.
-       Example: if ``data = {'a': {'datatype': "list", "ival": 2, ...},
-       'a.0': {'datatype': "int", "ival": 2, ...},
-       'a.1': {'datatype': "txt", "tval": "yy"}}``,
-       it will return ``{"a": [2, "yy"]}``
-    """
-    from collections import defaultdict
-
-    # I group results by zero-level entity
-    found_mainitems = {}
-    found_subitems = defaultdict(dict)
-    for mainkey, descriptiondict in data.items():
-        prefix, thissep, postfix = mainkey.partition(sep)
-        if thissep:
-            found_subitems[prefix][postfix] = {k: v for k, v in descriptiondict.items() if k != 'key'}
-        else:
-            mainitem = descriptiondict.copy()
-            mainitem['key'] = prefix
-            found_mainitems[prefix] = mainitem
-
-    # There can be mainitems without subitems, but there should not be subitems
-    # without mainitems.
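As an aside, the flattened format this function consumes is easiest to see on the docstring's own example. A minimal sketch of the round trip, assuming the separator is `'.'` and omitting the unused `fval`/`bval`/`dval` fields:

```python
from aiida.backends.djsite.db.migrations import deserialize_attributes

# Flattened rows as they would come out of the EAV table:
data = {
    'a': {'datatype': 'list', 'ival': 2, 'tval': ''},
    'a.0': {'datatype': 'int', 'ival': 2, 'tval': ''},
    'a.1': {'datatype': 'txt', 'ival': None, 'tval': 'yy'},
}

# 'a.0' and 'a.1' are grouped under the main item 'a' and the nested value rebuilt:
assert deserialize_attributes(data, sep='.') == {'a': [2, 'yy']}
```

The function body continues below with the consistency check that rejects sub-items lacking a base key.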
-    lone_subitems = set(found_subitems.keys()) - set(found_mainitems.keys())
-    if lone_subitems:
-        raise DeserializationException(f"Missing base keys for the following items: {','.join(lone_subitems)}")
-
-    # For each zero-level entity, I call the _deserialize_attribute function
-    retval = {}
-    for key, value in found_mainitems.items():
-        # Note: found_subitems[k] will return an empty dictionary if the
-        # key does not exist, as it is a defaultdict
-        retval[key] = _deserialize_attribute(
-            mainitem=value,
-            subitems=found_subitems[key],
-            sep=sep,
-            original_class=original_class,
-            original_pk=original_pk
-        )
-
-    return retval
-
-
-class ModelModifierV0025:
-    """This class implements the legacy EAV model used originally instead of JSONB.
-
-    The original Django backend implementation used a custom entity-attribute-value table for the attributes and extras
-    of a node. The logic was implemented in this class which was removed when the native JSONB field was used. However,
-    for the migrations this code is still needed, that is why it is kept here.
-    """
-
-    from aiida.backends.utils import AIIDA_ATTRIBUTE_SEP
-
-    _subspecifier_field_name = 'dbnode'
-    _sep = AIIDA_ATTRIBUTE_SEP
-
-    def __init__(self, apps, model_class):
-        self._apps = apps
-        self._model_class = model_class
-
-    @property
-    def apps(self):
-        return self._apps
-
-    def subspecifiers_dict(self, attr):
-        """Return a dict to narrow down the query to only those rows that also
-        match the subspecifier."""
-        if self._subspecifier_field_name is None:
-            return {}
-        return {self._subspecifier_field_name: getattr(attr, self._subspecifier_field_name)}
-
-    def subspecifier_pk(self, attr):
-        """
-        Return the subspecifier PK in the database (or None, if no
-        subspecifier should be used)
-        """
-        if self._subspecifier_field_name is None:
-            return None
-
-        return getattr(attr, self._subspecifier_field_name).pk
-
-    @staticmethod
-    def validate_key(key):
-        """
-        Validate the key string to check if it is valid (e.g., that it does not
-        contain the separator symbol).
-
-        :return: None if the key is valid
-        :raise aiida.common.ValidationError: if the key is not valid
-        """
-        from aiida.backends.utils import AIIDA_ATTRIBUTE_SEP
-        from aiida.common.exceptions import ValidationError
-
-        if not isinstance(key, str):
-            raise ValidationError('The key must be a string.')
-        if not key:
-            raise ValidationError('The key cannot be an empty string.')
-        if AIIDA_ATTRIBUTE_SEP in key:
-            raise ValidationError(
-                f"The separator symbol '{AIIDA_ATTRIBUTE_SEP}' cannot be present in the key of attributes, extras, etc."
-            )
-
-    def get_value_for_node(self, dbnode, key):
-        """
-        Get an attribute from the database for the given dbnode.
-
-        :return: the value stored in the Db table, correctly converted
-           to the right type.
-        :raise AttributeError: if no key is found for the given dbnode
-        """
-        cls = self._model_class
-        DbNode = self.apps.get_model('db', 'DbNode')  # pylint: disable=invalid-name
-
-        if isinstance(dbnode, int):
-            dbnode_node = DbNode(id=dbnode)
-        else:
-            dbnode_node = dbnode
-
-        try:
-            attr = cls.objects.get(dbnode=dbnode_node, key=key)
-        except ObjectDoesNotExist as exception:
-            raise AttributeError(f'{cls.__name__} with key {key} for node {dbnode_node.pk} not found in db') \
-                from exception
-
-        return self.getvalue(attr)
-
-    def getvalue(self, attr):
-        """This can be called on a given row and will get the corresponding value, casting it correctly."""
""" - try: - if attr.datatype in ('list', 'dict'): - prefix = f'{attr.key}{self._sep}' - prefix_len = len(prefix) - dballsubvalues = self._model_class.objects.filter( - key__startswith=prefix, **self.subspecifiers_dict(attr) - ).values_list('key', 'datatype', 'tval', 'fval', 'ival', 'bval', 'dval') - # Strip the FULL prefix and replace it with the simple - # "attr" prefix - data = { - f'attr.{_[0][prefix_len:]}': { - 'datatype': _[1], - 'tval': _[2], - 'fval': _[3], - 'ival': _[4], - 'bval': _[5], - 'dval': _[6], - } for _ in dballsubvalues - } - # for _ in dballsubvalues} - # Append also the item itself - data['attr'] = { - # Replace the key (which may contain the separator) with the - # simple "attr" key. In any case I do not need to return it! - 'key': 'attr', - 'datatype': attr.datatype, - 'tval': attr.tval, - 'fval': attr.fval, - 'ival': attr.ival, - 'bval': attr.bval, - 'dval': attr.dval - } - return deserialize_attributes( - data, sep=self._sep, original_class=self._model_class, original_pk=self.subspecifier_pk(attr) - )['attr'] - - data = { - 'attr': { - # Replace the key (which may contain the separator) with the - # simple "attr" key. In any case I do not need to return it! - 'key': 'attr', - 'datatype': attr.datatype, - 'tval': attr.tval, - 'fval': attr.fval, - 'ival': attr.ival, - 'bval': attr.bval, - 'dval': attr.dval - } - } - - return deserialize_attributes( - data, sep=self._sep, original_class=self._model_class, original_pk=self.subspecifier_pk(attr) - )['attr'] - except DeserializationException as exc: - exc = DbContentError(exc) - exc.original_exception = exc - raise exc - - def set_value_for_node(self, dbnode, key, value, with_transaction=False, stop_if_existing=False): - """ - This is the raw-level method that accesses the DB. No checks are done - to prevent the user from (re)setting a valid key. - To be used only internally. - - :todo: there may be some error on concurrent write; - not checked in this unlucky case! - - :param dbnode: the dbnode for which the attribute should be stored; - in an integer is passed, this is used as the PK of the dbnode, - without any further check (for speed reasons) - :param key: the key of the attribute to store; must be a level-zero - attribute (i.e., no separators in the key) - :param value: the value of the attribute to store - :param with_transaction: if True (default), do this within a transaction, - so that nothing gets stored if a subitem cannot be created. - Otherwise, if this parameter is False, no transaction management - is performed. - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the key already exists - for the given node. Otherwise, it will - first delete the old value, if existent. The use with True is - useful if you want to use a given attribute as a "locking" value, - e.g. to avoid to perform an action twice on the same node. - Note that, if you are using transactions, you may get the error - only when the transaction is committed. - - :raise ValueError: if the key contains the separator symbol used - internally to unpack dictionaries and lists (defined in cls._sep). 
- """ - DbNode = self.apps.get_model('db', 'DbNode') # pylint: disable=invalid-name - - if isinstance(dbnode, int): - dbnode_node = DbNode(id=dbnode) - else: - dbnode_node = dbnode - - self.set_value( - key, - value, - with_transaction=with_transaction, - subspecifier_value=dbnode_node, - stop_if_existing=stop_if_existing - ) - - def del_value_for_node(self, dbnode, key): - """ - Delete an attribute from the database for the given dbnode. - - :note: no exception is raised if no attribute with the given key is - found in the DB. - - :param dbnode: the dbnode for which you want to delete the key. - :param key: the key to delete. - """ - self.del_value(key, subspecifier_value=dbnode) - - def del_value(self, key, only_children=False, subspecifier_value=None): - """ - Delete a value associated with the given key (if existing). - - :note: No exceptions are raised if no entry is found. - - :param key: the key to delete. Can contain the separator self._sep if - you want to delete a subkey. - :param only_children: if True, delete only children and not the - entry itself. - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - """ - cls = self._model_class - from django.db.models import Q - - if self._subspecifier_field_name is None: - if subspecifier_value is not None: - raise ValueError( - f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers' - ) - subspecifiers_dict = {} - else: - if subspecifier_value is None: - raise ValueError( - 'You also have to specify a subspecifier value ' - 'for class {} (the {})'.format(self.__name__, self._subspecifier_field_name) # pylint: disable=no-member - ) - subspecifiers_dict = {self._subspecifier_field_name: subspecifier_value} - - query = Q(key__startswith=f'{key}{self._sep}', **subspecifiers_dict) - - if not only_children: - query.add(Q(key=key, **subspecifiers_dict), Q.OR) - - cls.objects.filter(query).delete() - - def set_value( - self, - key, - value, - with_transaction=False, - subspecifier_value=None, - other_attribs=None, - stop_if_existing=False - ): # pylint: disable=too-many-arguments - """ - Set a new value in the DB, possibly associated to the given subspecifier. - - :note: This method also stored directly in the DB. - - :param key: a string with the key to create (must be a level-0 - attribute, that is it cannot contain the separator cls._sep). - :param value: the value to store (a basic data type or a list or a dict) - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - :param with_transaction: True if you want this function to be managed - with transactions. Set to False if you already have a manual - management of transactions in the block where you are calling this - function (useful for speed improvements to avoid recursive - transactions) - :param other_attribs: a dictionary of other parameters, to store - only on the level-zero attribute (e.g. for description in DbSetting). - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the new entry would violate an - uniqueness constraint in the DB (same key, or same key+node, - depending on the specific subclass). 
-           Otherwise, it will first delete the old value, if present. The use
-           with True is useful if you want to use a given attribute as a
-           "locking" value, e.g. to avoid performing an action twice on the
-           same node. Note that, if you are using transactions, you may get
-           the error only when the transaction is committed.
-        """
-        cls = self._model_class
-        from django.db import transaction
-
-        other_attribs = other_attribs if other_attribs is not None else {}
-
-        self.validate_key(key)
-
-        try:
-            if with_transaction:
-                sid = transaction.savepoint()
-
-            # create_value returns a list of nodes to store
-            to_store = self.create_value(key, value, subspecifier_value=subspecifier_value, other_attribs=other_attribs)
-
-            if to_store:
-                if not stop_if_existing:
-                    # Delete the old values if stop_if_existing is False,
-                    # otherwise don't delete them and hope they don't
-                    # exist. If they exist, I'll get a UniquenessError
-
-                    # NOTE! Be careful in case the extra/attribute to
-                    # store is not a simple attribute but a list or dict:
-                    # like this, it should be ok because if we are
-                    # overwriting an entry it will stop anyway to avoid
-                    # overwriting the main entry, but otherwise
-                    # there is the risk that trailing pieces remain
-                    # so in general it is good to recursively clean
-                    # all sub-items.
-                    self.del_value(key, subspecifier_value=subspecifier_value)
-                cls.objects.bulk_create(to_store, batch_size=get_config_option('db.batch_size'))
-
-            if with_transaction:
-                transaction.savepoint_commit(sid)
-        except BaseException as exc:  # All exceptions including CTRL+C, ...
-            from django.db.utils import IntegrityError
-
-            from aiida.common.exceptions import UniquenessError
-
-            if with_transaction:
-                transaction.savepoint_rollback(sid)
-            if isinstance(exc, IntegrityError) and stop_if_existing:
-                raise UniquenessError(
-                    f"Impossible to create the required entry in table '{cls.__name__}', another entry already "
-                    f'exists and the creation would violate a uniqueness constraint.\nFurther details: {exc}'
-                ) from exc
-            raise
-
-    @staticmethod
-    def set_basic_data_attributes(obj, value):
-        """Set obj attributes if they are of basic Python types."""
-        if isinstance(value, bool):
-            obj.datatype = 'bool'
-            obj.bval = value
-
-        elif isinstance(value, int):
-            obj.datatype = 'int'
-            obj.ival = value
-
-        elif isinstance(value, float):
-            obj.datatype = 'float'
-            obj.fval = value
-            obj.tval = ''
-
-        elif isinstance(value, str):
-            obj.datatype = 'txt'
-            obj.tval = value
-
-    def create_value(self, key, value, subspecifier_value=None, other_attribs=None):
-        """
-        Create a new list of attributes, without storing them, associated
-        with the current key/value pair (and to the given subspecifier,
-        e.g. the DbNode for DbAttributes and DbExtras).
-
-        :note: No queries are performed on the DB, in particular no check is done
-           on the existence of the given nodes.
-
-        :param key: a string with the key to create (can contain the
-           separator self._sep if this is a sub-attribute: indeed, this
-           function calls itself recursively)
-        :param value: the value to store (a basic data type or a list or a dict)
-        :param subspecifier_value: must be None if this class has no
-           subspecifier set (e.g., the DbSetting class).
-           Must be the value of the subspecifier (e.g., the dbnode) for classes
-           that define it (e.g. DbAttribute and DbExtra)
-        :param other_attribs: a dictionary of other parameters, to store
-           only on the level-zero attribute (e.g. for description in DbSetting).
-
-        :return: always a list of class instances; it is the user's
-           responsibility to store such entries (typically with a Django
-           bulk_create() call)."""
-
-        cls = self._model_class
-        import datetime
-
-        from aiida.common import json
-        from aiida.common.timezone import get_current_timezone, is_naive, make_aware
-
-        other_attribs = other_attribs if other_attribs is not None else {}
-
-        if self._subspecifier_field_name is None:
-            if subspecifier_value is not None:
-                raise ValueError(
-                    f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers'
-                )
-            new_entry = cls(key=key, **other_attribs)
-        else:
-            if subspecifier_value is None:
-                raise ValueError(
-                    'You also have to specify a subspecifier value '
-                    'for class {} (the {})'.format(cls.__name__, self._subspecifier_field_name)
-                )
-            further_params = other_attribs.copy()
-            further_params.update({self._subspecifier_field_name: subspecifier_value})
-            new_entry = cls(key=key, **further_params)
-
-        list_to_return = [new_entry]
-
-        new_entry.datatype = 'none'
-        new_entry.bval = None
-        new_entry.tval = ''
-        new_entry.ival = None
-        new_entry.fval = None
-        new_entry.dval = None
-
-        if isinstance(value, (bool, int, float, str)):
-            self.set_basic_data_attributes(new_entry, value)
-
-        elif isinstance(value, datetime.datetime):
-
-            new_entry.datatype = 'date'
-            # For time-aware and time-naive datetime objects, see
-            # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#naive-and-aware-datetime-objects
-            new_entry.dval = make_aware(value, get_current_timezone()) if is_naive(value) else value
-
-        elif isinstance(value, (list, tuple)):
-
-            new_entry.datatype = 'list'
-            new_entry.ival = len(value)
-
-            for i, subv in enumerate(value):
-                # I do not need get_or_create here, because
-                # above I deleted all children (and I
-                # expect no concurrency)
-                # NOTE: I do not pass other_attribs
-                list_to_return.extend(
-                    self.create_value(key=f'{key}{self._sep}{i:d}', value=subv, subspecifier_value=subspecifier_value)
-                )
-
-        elif isinstance(value, dict):
-
-            new_entry.datatype = 'dict'
-            new_entry.ival = len(value)
-
-            for subk, subv in value.items():
-                self.validate_key(subk)
-
-                # I do not need get_or_create here, because
-                # above I deleted all children (and I
-                # expect no concurrency)
-                # NOTE: I do not pass other_attribs
-                list_to_return.extend(
-                    self.create_value(key=f'{key}{self._sep}{subk}', value=subv, subspecifier_value=subspecifier_value)
-                )
-        else:
-            try:
-                jsondata = json.dumps(value)
-            except TypeError as exc:
-                raise ValueError(
-                    f'Unable to store the value: it must be either a basic datatype, or json-serializable: {value}'
-                ) from exc
-
-            new_entry.datatype = 'json'
-            new_entry.tval = jsondata
-
-        return list_to_return
diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py
deleted file mode 100644
index 1c5a3714d8..0000000000
--- a/aiida/backends/djsite/db/models.py
+++ /dev/null
@@ -1,419 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
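That closes the legacy EAV machinery of the migrations package. As a recap before moving into the models module below, here is a sketch of how a data migration would have tied these pieces together: `create_value`/`set_value_for_node` flatten a nested value into one row per (sub)item, and `get_value_for_node` reassembles it. The node PK and attribute keys are hypothetical, and a pre-JSONB schema (where `DbAttribute` still exists) is assumed:

```python
from aiida.backends.djsite.db.migrations import ModelModifierV0025


def forward(apps, schema_editor):  # hypothetical RunPython callable
    """Copy one EAV attribute to a new key, then delete the old one."""
    modifier = ModelModifierV0025(apps, apps.get_model('db', 'DbAttribute'))

    # Read a (possibly nested) attribute of the node with PK 42; sub-rows such as
    # 'mykey.0', 'mykey.1' are collected and deserialized back into a Python value.
    value = modifier.get_value_for_node(42, 'mykey')

    # Writing it back flattens the value again: one DbAttribute row per (sub)item,
    # bulk-created in a single call after the old rows are deleted.
    modifier.set_value_for_node(42, 'mykey_renamed', value)
    modifier.del_value_for_node(42, 'mykey')
```

Such a function would be wired into a migration via `migrations.RunPython(forward, ...)`, following the pattern shown earlier.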
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module,no-member
-"""Module that defines db models."""
-import contextlib
-
-from django.contrib.postgres.fields import JSONField
-from django.db import models as m
-from django.db.models.query import QuerySet
-from pytz import UTC
-
-from aiida.backends.djsite.db import migrations
-from aiida.common import timezone
-from aiida.common.json import JSONEncoder
-from aiida.common.utils import get_new_uuid
-
-# This variable identifies the schema version of this file.
-# Every time you change the schema below in *ANY* way, REMEMBER TO CHANGE
-# the version here in the migration file and update migrations/__init__.py.
-# See the documentation for how to do all this.
-#
-# The version is checked at code load time to verify that the code schema
-# version and the DB schema version are the same. (The DB schema version
-# is stored in the DbSetting table and the check is done in the
-# load_dbenv() function.)
-SCHEMA_VERSION = migrations.current_schema_version()
-
-
-class AiidaQuerySet(QuerySet):
-    """Represent a lazy database lookup for a set of objects."""
-
-    def iterator(self, chunk_size=2000):
-        from aiida.orm.implementation.django import convert
-        for obj in super().iterator(chunk_size=chunk_size):
-            yield convert.get_backend_entity(obj, None)
-
-    def __iter__(self):
-        """Iterate for list comprehensions.
-
-        Note: used to rely on the iterator in django 1.8 but no longer does as of django 1.11.
-        """
-        from aiida.orm.implementation.django import convert
-        return (convert.get_backend_entity(model, None) for model in super().__iter__())
-
-    def __getitem__(self, key):
-        """Get item for [] operator
-
-        Note: used to rely on the iterator in django 1.8 but no longer does as of django 1.11."""
-        from aiida.orm.implementation.django import convert
-        res = super().__getitem__(key)
-        return convert.get_backend_entity(res, None)
-
-
-class AiidaObjectManager(m.Manager):
-
-    def get_queryset(self):
-        return AiidaQuerySet(self.model, using=self._db)
-
-
-class DbUser(m.Model):
-    """Class that represents a user as the owner of a specific Node."""
-
-    is_anonymous = False
-    is_authenticated = True
-
-    USERNAME_FIELD = 'email'
-    REQUIRED_FIELDS = ()
-
-    # Set unique email field
-    email = m.EmailField(unique=True, db_index=True)
-    first_name = m.CharField(max_length=254, blank=True)
-    last_name = m.CharField(max_length=254, blank=True)
-    institution = m.CharField(max_length=254, blank=True)
-
-
-class DbNode(m.Model):
-    """Generic node: data or calculation or code.
-
-    Nodes can be linked (DbLink table)
-    Naming convention for Node relationships: A --> C --> B.
-
-    * A is 'input' of C.
-    * C is 'output' of A.
-
-    Internal attributes, that define the node itself,
-    are stored in the DbAttribute table; further user-defined attributes,
-    called 'extra', are stored in the DbExtra table (same schema and methods
-    of the DbAttribute table, but the code does not rely on the content of the
-    table, therefore the user can use it at will to tag or annotate nodes).
-
-    :note: Attributes in the DbAttribute table have to be thought of as belonging
-       to the DbNode (this is the reason for which there is no 'user' field
-       in the DbAttribute field).
-       Moreover, Attributes uniquely define the Node and so should be immutable."""
-
-    uuid = m.UUIDField(default=get_new_uuid, unique=True)
-    # in the form data.upffile., data.structure., calculation., ...
-    # Note that there is always a final dot, to allow to do queries of the
-    # type (node_type__startswith="calculation.") and avoid problems with classes
-    # starting with the same string
-    # max_length required for index by MySQL
-    node_type = m.CharField(max_length=255, db_index=True)
-    process_type = m.CharField(max_length=255, db_index=True, null=True)
-    label = m.CharField(max_length=255, db_index=True, blank=True)
-    description = m.TextField(blank=True)
-    # creation time
-    ctime = m.DateTimeField(default=timezone.now, db_index=True, editable=False)
-    mtime = m.DateTimeField(auto_now=True, db_index=True, editable=False)
-    # Cannot delete a user if something is associated to it
-    user = m.ForeignKey(DbUser, on_delete=m.PROTECT, related_name='dbnodes')
-
-    # Direct links
-    outputs = m.ManyToManyField('self', symmetrical=False, related_name='inputs', through='DbLink')
-
-    # Used only if dbnode is a calculation, or remotedata
-    # Avoid that computers can be deleted if at least a node exists pointing
-    # to it.
-    dbcomputer = m.ForeignKey('DbComputer', null=True, on_delete=m.PROTECT, related_name='dbnodes')
-
-    # JSON Attributes
-    attributes = JSONField(default=dict, null=True, encoder=JSONEncoder)
-    # JSON Extras
-    extras = JSONField(default=dict, null=True, encoder=JSONEncoder)
-    repository_metadata = JSONField(default=dict, null=True, encoder=JSONEncoder)
-
-    objects = m.Manager()
-    # Return aiida Node instances or their subclasses instead of DbNode instances
-    aiidaobjects = AiidaObjectManager()
-
-    def get_simple_name(self, invalid_result=None):
-        """Return a string with the last part of the type name.
-
-        If the type is empty, use 'Node'.
-        If the type is invalid, return the content of the input variable
-        ``invalid_result``.
-
-        :param invalid_result: The value to be returned if the node type is
-           not recognized."""
-        thistype = self.node_type
-        # Fix for base class
-        if thistype == '':
-            thistype = 'node.Node.'
-        if not thistype.endswith('.'):
-            return invalid_result
-        thistype = thistype[:-1]  # Strip final dot
-        return thistype.rpartition('.')[2]
-
-    def __str__(self):
-        simplename = self.get_simple_name(invalid_result='Unknown')
-        # node pk + type
-        if self.label:
-            return f'{simplename} node [{self.pk}]: {self.label}'
-        return f'{simplename} node [{self.pk}]'
-
-
-class DbLink(m.Model):
-    """Direct connection between two dbnodes.
-    The label identifies the link type."""
-
-    # If I delete an output, delete also the link; if I delete an input, stop
-    # NOTE: this will in most cases render a DbNode.objects.filter(...).delete()
-    # call unusable because some nodes will be inputs; Nodes will have to
-    # be deleted in the proper order (or links will need to be deleted first)
-    # The `input` and `output` columns do not need an explicit `db_index` as it is `True` by default for foreign keys
-    input = m.ForeignKey('DbNode', related_name='output_links', on_delete=m.PROTECT)
-    output = m.ForeignKey('DbNode', related_name='input_links', on_delete=m.CASCADE)
-    label = m.CharField(max_length=255, db_index=True, blank=False)
-    type = m.CharField(max_length=255, db_index=True, blank=True)
-
-    def __str__(self):
-        return '{} ({}) --> {} ({})'.format(
-            self.input.get_simple_name(invalid_result='Unknown node'),
-            self.input.pk,
-            self.output.get_simple_name(invalid_result='Unknown node'),
-            self.output.pk,
-        )
-
-
-class DbSetting(m.Model):
-    """This will store generic settings that should be database-wide."""
-    key = m.CharField(max_length=1024, db_index=True, blank=False, unique=True)
-    val = JSONField(default=None, null=True, encoder=JSONEncoder)
-    # I also add a description field for the variables
-    description = m.TextField(blank=True)
-    # Modification time of this attribute
-    time = m.DateTimeField(auto_now=True, editable=False)
-
-    def __str__(self):
-        return f"'{self.key}'={self.getvalue()}"
-
-    @classmethod
-    def set_value(cls, key, value, other_attribs=None, stop_if_existing=False):
-        """Set a setting value."""
-        other_attribs = other_attribs if other_attribs is not None else {}
-        setting = DbSetting.objects.filter(key=key).first()
-        if setting is not None:
-            if stop_if_existing:
-                return
-        else:
-            setting = cls()
-
-        setting.key = key
-        setting.val = value
-        setting.time = timezone.datetime.now(tz=UTC)
-        if 'description' in other_attribs.keys():
-            setting.description = other_attribs['description']
-        setting.save()
-
-    def getvalue(self):
-        """This can be called on a given row and will get the corresponding value."""
-        return self.val
-
-    def get_description(self):
-        """This can be called on a given row and will get the corresponding description."""
-        return self.description
-
-    @classmethod
-    def del_value(cls, key):
-        """Delete a setting value."""
-
-        setting = DbSetting.objects.filter(key=key).first()
-        if setting is not None:
-            setting.val = None
-            setting.time = timezone.datetime.utcnow()
-            setting.save()
-        else:
-            raise KeyError()
-
-
-class DbGroup(m.Model):
-    """
-    A group of nodes.
-
-    Any group of nodes can be created, but some groups may have specific meaning
-    if they satisfy specific rules (for instance, groups of UpfData objects are
-    pseudopotential families - if no two pseudos are included for the same
-    atomic element).
-    """
-    uuid = m.UUIDField(default=get_new_uuid, unique=True)
-    # max_length is required by MySql to have indexes and unique constraints
-    label = m.CharField(max_length=255, db_index=True)
-    # The type_string of group: a user group, a pseudopotential group,...
-    # User groups have type_string equal to an empty string
-    type_string = m.CharField(default='', max_length=255, db_index=True)
-    dbnodes = m.ManyToManyField('DbNode', related_name='dbgroups')
-    # Creation time
-    time = m.DateTimeField(default=timezone.now, editable=False)
-    description = m.TextField(blank=True)
-    # The owner of the group, not of the calculations
-    # On user deletion, remove his/her groups too (not the calculations, only
-    # the groups)
-    user = m.ForeignKey(DbUser, on_delete=m.CASCADE, related_name='dbgroups')
-    # JSON Extras
-    extras = JSONField(default=dict, null=False, encoder=JSONEncoder)
-
-    class Meta:
-        unique_together = (('label', 'type_string'),)
-
-    def __str__(self):
-        return f'<DbGroup [type {self.type_string}] "{self.label}">'
-
-
-class DbComputer(m.Model):
-    """
-    Table of computers or clusters.
-
-    Attributes:
-    * label: A name to be used to refer to this computer. Must be unique.
-    * hostname: Fully-qualified hostname of the host
-    * transport_type: a string with a valid transport type
-
-
-    Note: other things that may be set in the metadata:
-
-        * mpirun command
-
-        * num cores per node
-
-        * max num cores
-
-        * workdir: Full path of the aiida folder on the host. It can contain\
-            the string {username} that will be substituted by the username\
-            of the user on that machine.\
-            The actual workdir is then obtained as\
-            workdir.format(username=THE_ACTUAL_USERNAME)\
-            Example: \
-            workdir = "/scratch/{username}/aiida/"
-
-
-        * allocate full node = True or False
-
-        * ... (further limits per user etc.)
-
-    """
-    uuid = m.UUIDField(default=get_new_uuid, unique=True)
-    label = m.CharField(max_length=255, unique=True, blank=False)
-    hostname = m.CharField(max_length=255)
-    description = m.TextField(blank=True)
-    scheduler_type = m.CharField(max_length=255)
-    transport_type = m.CharField(max_length=255)
-    metadata = JSONField(default=dict, encoder=JSONEncoder)
-
-    def __str__(self):
-        return f'{self.label} ({self.hostname})'
-
-
-class DbAuthInfo(m.Model):
-    """
-    Table that pairs aiida users and computers, with all required authentication
-    information.
-    """
-    # Delete the DbAuthInfo if either the user or the computer are removed
-    aiidauser = m.ForeignKey(DbUser, on_delete=m.CASCADE)
-    dbcomputer = m.ForeignKey(DbComputer, on_delete=m.CASCADE)
-    auth_params = JSONField(default=dict, encoder=JSONEncoder)  # contains mainly the remoteuser and the private_key
-
-    # The keys defined in the metadata of the DbAuthInfo will override the
-    # keys with the same label defined in the DbComputer (using a dict.update()
-    # call of python).
-    metadata = JSONField(default=dict, encoder=JSONEncoder)
-    # Whether this computer is enabled (user-level enabling feature)
-    enabled = m.BooleanField(default=True)
-
-    class Meta:
-        unique_together = (('aiidauser', 'dbcomputer'),)
-
-    def __str__(self):
-        if self.enabled:
-            return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.label}'
-        return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.label} [DISABLED]'
-
-
-class DbComment(m.Model):
-    """Class to store comments."""
""" - uuid = m.UUIDField(default=get_new_uuid, unique=True) - # Delete comments if the node is removed - dbnode = m.ForeignKey(DbNode, related_name='dbcomments', on_delete=m.CASCADE) - ctime = m.DateTimeField(default=timezone.now, editable=False) - mtime = m.DateTimeField(auto_now=True, editable=False) - # Delete the comments of a deleted user (TODO: check if this is a good policy) - user = m.ForeignKey(DbUser, on_delete=m.CASCADE) - content = m.TextField(blank=True) - - def __str__(self): - return 'DbComment for [{} {}] on {}'.format( - self.dbnode.get_simple_name(), self.dbnode.pk, - timezone.localtime(self.ctime).strftime('%Y-%m-%d') - ) - - -class DbLog(m.Model): - """Class to store logs.""" - uuid = m.UUIDField(default=get_new_uuid, unique=True) - time = m.DateTimeField(default=timezone.now, editable=False) - loggername = m.CharField(max_length=255, db_index=True) - levelname = m.CharField(max_length=50, db_index=True) - dbnode = m.ForeignKey(DbNode, related_name='dblogs', on_delete=m.CASCADE) - message = m.TextField(blank=True) - metadata = JSONField(default=dict, encoder=JSONEncoder) - - def __str__(self): - return f'DbLog: {self.levelname} for node {self.dbnode.id}: {self.message}' - - -@contextlib.contextmanager -def suppress_auto_now(list_of_models_fields): - """ - This context manager disables the auto_now & editable flags for the - fields of the given models. - This is useful when we would like to update the datetime fields of an - entry bypassing the automatic set of the date (with the current time). - This is very useful when entries are imported and we would like to keep e.g. - the modification time that we set during the import and not allow Django - to set it to the datetime that corresponds to when the entry was saved. - In the end the flags are returned to their original value. - :param list_of_models_fields: A list of (model, fields) tuples for - which the flags will be updated. The model is an object that corresponds - to the model objects and fields is a list of strings with the field names. - """ - # Here we store the original values of the fields of the models that will - # be updated - # E.g. - # _original_model_values = { - # ModelA: [fieldA: { - # 'auto_now': orig_valA1 - # 'editable': orig_valA2 - # }, - # fieldB: { - # 'auto_now': orig_valB1 - # 'editable': orig_valB2 - # } - # ] - # ... - # } - _original_model_values = {} - for model, fields in list_of_models_fields: - _original_field_values = {} - for field in model._meta.local_fields: # pylint: disable=protected-access - if field.name in fields: - _original_field_values[field] = { - 'auto_now': field.auto_now, - 'editable': field.editable, - } - field.auto_now = False - field.editable = True - _original_model_values[model] = _original_field_values - try: - yield - finally: - for model, data in _original_model_values.items(): - for field, value in data.items(): - field.auto_now = value['auto_now'] - field.editable = value['editable'] diff --git a/aiida/backends/djsite/db/testbase.py b/aiida/backends/djsite/db/testbase.py deleted file mode 100644 index a76aab5763..0000000000 --- a/aiida/backends/djsite/db/testbase.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-"""
-Base class for AiiDA tests
-"""
-
-from aiida.backends.testimplbase import AiidaTestImplementation
-
-
-# This contains the codebase for the setUpClass and tearDown methods used internally by the AiidaTestCase.
-# This inherits only from 'object' to avoid it being picked up by the automatic discovery of tests
-# (it shouldn't be, as it risks destroying the DB if the checks are not in place, and these are
-# implemented in the AiidaTestCase).
-class DjangoTests(AiidaTestImplementation):
-    """
-    Automatically takes care of the setUpClass and tearDownClass, when needed.
-    """
-
-    def clean_db(self):
-        from aiida.backends.djsite.db import models
-
-        # I first need to delete the links, because in principle I could not delete input nodes, only outputs.
-        # For simplicity, since I am deleting everything, I delete the links first
-        models.DbLink.objects.all().delete()
-
-        # Then I delete the nodes, otherwise I cannot delete computers and users
-        models.DbLog.objects.all().delete()
-        models.DbNode.objects.all().delete()  # pylint: disable=no-member
-        models.DbUser.objects.all().delete()  # pylint: disable=no-member
-        models.DbComputer.objects.all().delete()
-        models.DbGroup.objects.all().delete()
diff --git a/aiida/backends/djsite/manage.py b/aiida/backends/djsite/manage.py
deleted file mode 100755
index ec7732002a..0000000000
--- a/aiida/backends/djsite/manage.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-"""Simple wrapper around Django's `manage.py` CLI script."""
-import click
-
-from aiida.cmdline.params import options, types
-
-
-@click.command()
-@options.PROFILE(required=True, type=types.ProfileParamType(load_profile=True))
-@click.argument('command', nargs=-1)
-def main(profile, command):  # pylint: disable=unused-argument
-    """Simple wrapper around the Django command line tool that first loads an AiiDA profile."""
-    from django.core.management import execute_from_command_line  # pylint: disable=import-error,no-name-in-module
-
-    from aiida.manage.manager import get_manager
-
-    manager = get_manager()
-    manager._load_backend(schema_check=False)  # pylint: disable=protected-access
-
-    # The `execute_from_command_line` function expects a list of command line arguments where the first is the
-    # program name that one would normally call directly. Since this is now replaced by our `click` command we
-    # just spoof a random name.
-    argv = ['basename'] + list(command)
-    execute_from_command_line(argv)
-
-
-if __name__ == '__main__':
-    main()  # pylint: disable=no-value-for-parameter
diff --git a/aiida/backends/djsite/manager.py b/aiida/backends/djsite/manager.py
deleted file mode 100644
index a59521a7c3..0000000000
--- a/aiida/backends/djsite/manager.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module
-"""Utilities and configuration of the Django database schema."""
-
-import os
-
-import django
-
-from aiida.common import NotExistent
-
-from ..manager import SCHEMA_VERSION_DESCRIPTION, SCHEMA_VERSION_KEY, BackendManager, Setting, SettingsManager
-
-# The database schema version required to perform schema reset for a given code schema generation
-SCHEMA_VERSION_RESET = {'1': None}
-
-
-class DjangoBackendManager(BackendManager):
-    """Class to manage the database schema."""
-
-    def get_settings_manager(self):
-        """Return an instance of the `SettingsManager`.
-
-        :return: `SettingsManager`
-        """
-        if self._settings_manager is None:
-            self._settings_manager = DjangoSettingsManager()
-
-        return self._settings_manager
-
-    def _load_backend_environment(self, **kwargs):
-        """Load the backend environment.
-
-        The scoped session is needed for the QueryBuilder only.
-
-        :param kwargs: keyword arguments that will be passed on to :py:func:`aiida.backends.djsite.get_scoped_session`.
-        """
-        os.environ['DJANGO_SETTINGS_MODULE'] = 'aiida.backends.djsite.settings'
-        django.setup()  # pylint: disable=no-member
-
-        # For QueryBuilder only
-        from . import get_scoped_session
-        get_scoped_session(**kwargs)
-
-    def reset_backend_environment(self):
-        """Reset the backend environment."""
-        from . import reset_session
-        reset_session()
-
-    def is_database_schema_ahead(self):
-        """Determine whether the database schema version is ahead of the code schema version.
-
-        .. warning:: this will not check whether the schema generations are equal
-
-        :return: boolean, True if the database schema version is ahead of the code schema version.
-        """
-        # For Django the version numbers are numerical so we can compare them
-        from distutils.version import StrictVersion
-        return StrictVersion(self.get_schema_version_backend()) > StrictVersion(self.get_schema_version_head())
-
-    def get_schema_version_head(self):
-        """Return the schema version of the code, i.e. that of the latest migration."""
-        from .db.models import SCHEMA_VERSION
-        return SCHEMA_VERSION
-
-    def get_schema_version_reset(self, schema_generation_code):
-        """Return schema version the database should have to be able to automatically reset to code schema generation.
-
-        :param schema_generation_code: the schema generation of the code.
-        :return: schema version
-        """
-        return SCHEMA_VERSION_RESET[schema_generation_code]
-
-    def get_schema_generation_database(self):
-        """Return the database schema version.
-
-        :return: `distutils.version.StrictVersion` with schema version of the database
-        """
-        from django.db.utils import ProgrammingError
-
-        from aiida.manage.manager import get_manager
-
-        backend = get_manager()._load_backend(schema_check=False, repository_check=False)  # pylint: disable=protected-access
-
-        try:
-            result = backend.execute_raw(r"""SELECT val FROM db_dbsetting WHERE key = 'schema_generation';""")
-        except ProgrammingError:
-            # If this value does not exist, the schema has to correspond to the first generation which didn't actually
-            # record its value explicitly in the database until ``aiida-core>=1.0.0``.
-            return '1'
-        else:
-            try:
-                return str(int(result[0][0]))
-            except (IndexError, ValueError, TypeError):
-                return '1'
-
-    def get_schema_version_backend(self):
-        """Return the schema version stored in the database."""
-        from django.db.utils import ProgrammingError
-
-        from aiida.manage.manager import get_manager
-
-        backend = get_manager()._load_backend(schema_check=False, repository_check=False)  # pylint: disable=protected-access
-
-        try:
-            result = backend.execute_raw(r"""SELECT val FROM db_dbsetting WHERE key = 'db|schemaversion';""")
-        except ProgrammingError:
-            result = backend.execute_raw(r"""SELECT tval FROM db_dbsetting WHERE key = 'db|schemaversion';""")
-        return result[0][0]
-
-    def set_schema_version_backend(self, version: str) -> None:
-        return self.get_settings_manager().set(SCHEMA_VERSION_KEY, version, description=SCHEMA_VERSION_DESCRIPTION)
-
-    def _migrate_database_generation(self):
-        """Reset the database schema generation.
-
-        For Django we also have to clear the `django_migrations` table that contains a history of all applied
-        migrations. After clearing it, we reinsert the name of the new initial schema.
-        """
-        # pylint: disable=cyclic-import
-        from aiida.manage.manager import get_manager
-        super()._migrate_database_generation()
-
-        backend = get_manager()._load_backend(schema_check=False, repository_check=False)  # pylint: disable=protected-access
-        backend.execute_raw(r"""DELETE FROM django_migrations WHERE app = 'db';""")
-        backend.execute_raw(
-            r"""INSERT INTO django_migrations (app, name, applied) VALUES ('db', '0001_initial', NOW());"""
-        )
-
-    def _migrate_database_version(self):
-        """Migrate the database to the current schema version."""
-        super()._migrate_database_version()
-        from django.core.management import call_command  # pylint: disable=no-name-in-module,import-error
-        call_command('migrate')
-
-
-class DjangoSettingsManager(SettingsManager):
-    """Class to get, set and delete settings from the `DbSetting` table."""
-
-    table_name = 'db_dbsetting'
-
-    def validate_table_existence(self):
-        """Verify that the `DbSetting` table actually exists.
-
-        :raises: `~aiida.common.exceptions.NotExistent` if the settings table does not exist
-        """
-        from django.db import connection
-        if self.table_name not in connection.introspection.table_names():
-            raise NotExistent('the settings table does not exist')
-
-    def get(self, key):
-        """Return the setting with the given key.
- - :param key: the key identifying the setting - :return: Setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - from aiida.backends.djsite.db.models import DbSetting - - self.validate_table_existence() - setting = DbSetting.objects.filter(key=key).first() - - if setting is None: - raise NotExistent(f'setting `{key}` does not exist') - - return Setting(setting.key, setting.val, setting.description, setting.time) - - def set(self, key, value, description=None): - """Return the settings with the given key. - - :param key: the key identifying the setting - :param value: the value for the setting - :param description: optional setting description - """ - from aiida.backends.djsite.db.models import DbSetting - from aiida.orm.implementation.utils import validate_attribute_extra_key - - self.validate_table_existence() - validate_attribute_extra_key(key) - - other_attribs = {} - if description is not None: - other_attribs['description'] = description - - DbSetting.set_value(key, value, other_attribs=other_attribs) - - def delete(self, key): - """Delete the setting with the given key. - - :param key: the key identifying the setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - from aiida.backends.djsite.db.models import DbSetting - - self.validate_table_existence() - - try: - DbSetting.del_value(key=key) - except KeyError: - raise NotExistent(f'setting `{key}` does not exist') from KeyError diff --git a/aiida/backends/djsite/settings.py b/aiida/backends/djsite/settings.py deleted file mode 100644 index 8dea65dd2d..0000000000 --- a/aiida/backends/djsite/settings.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error, no-name-in-module -""" Django settings for the AiiDA project. 
""" -from sqlalchemy.dialects.postgresql import JSONB, UUID - -from aiida.common import exceptions -from aiida.common.timezone import get_current_timezone -from aiida.manage.configuration import get_profile, settings - -try: - PROFILE = get_profile() -except exceptions.MissingConfigurationError as exception: - raise exceptions.MissingConfigurationError(f'the configuration could not be loaded: {exception}') - -if PROFILE is None: - raise exceptions.ProfileConfigurationError('no profile has been loaded') - -if PROFILE.storage_backend != 'django': - raise exceptions.ProfileConfigurationError( - f'incommensurate database backend `{PROFILE.storage_backend}` for profile `{PROFILE.name}`' - ) - -DATABASES = { - 'default': { - 'ENGINE': f"django.db.backends.{PROFILE.storage_config['database_engine']}", - 'NAME': PROFILE.storage_config['database_name'], - 'PORT': PROFILE.storage_config['database_port'], - 'HOST': PROFILE.storage_config['database_hostname'], - 'USER': PROFILE.storage_config['database_username'], - 'PASSWORD': PROFILE.storage_config['database_password'], - } -} - -# CUSTOM USER CLASS -AUTH_USER_MODEL = 'db.DbUser' - -# No secret key defined since we do not use Django to serve HTTP pages -SECRET_KEY = 'placeholder' # noqa - -# Automatic logging configuration for Django is disabled here -# and done for all backends in aiida/__init__.py -LOGGING_CONFIG = None - -# Keep DEBUG = False! Otherwise every query is stored in memory -DEBUG = False - -ADMINS = [] -ALLOWED_HOSTS = [] - -MANAGERS = ADMINS - -# Language code for this installation. All choices can be found here: -# http://www.i18nguy.com/unicode/language-identifiers.html -LANGUAGE_CODE = 'en-us' - -# Local time zone for this installation. Always choose the system timezone. -# Note: This causes django to set the 'TZ' environment variable, which is read by tzlocal from then onwards. -# See https://docs.djangoproject.com/en/2.2/ref/settings/#std:setting-TIME_ZONE -TIME_ZONE = get_current_timezone().zone - -SITE_ID = 1 - -# If you set this to False, Django will make some optimizations so as not -# to load the internationalization machinery. -USE_I18N = False - -# If you set this to False, Django will not format dates, numbers and -# calendars according to the current locale. -USE_L10N = False - -# If you set this to False, Django will not use timezone-aware datetimes. -# For AiiDA, leave it as True, otherwise setting properties with dates will not work. -USE_TZ = settings.USE_TZ - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.messages.context_processors.messages', - ], - 'debug': - DEBUG, - }, - }, -] - -INSTALLED_APPS = [ - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'aiida.backends.djsite.db', - 'aldjemy', -] - -ALDJEMY_DATA_TYPES = { - 'UUIDField': lambda field: UUID(), - 'JSONField': lambda field: JSONB(), -} diff --git a/aiida/backends/general/__init__.py b/aiida/backends/general/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/aiida/backends/general/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/aiida/backends/general/migrations/duplicate_uuids.py b/aiida/backends/general/migrations/duplicate_uuids.py deleted file mode 100644 index 0e06e84ec3..0000000000 --- a/aiida/backends/general/migrations/duplicate_uuids.py +++ /dev/null @@ -1,127 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Generic functions to verify the integrity of the database and optionally apply patches to fix problems.""" -import os - -from aiida.common import exceptions - -TABLES_UUID_DEDUPLICATION = ('db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbnode') - - -def _get_duplicate_uuids(table): - """Retrieve rows with duplicate UUIDS. - - :param table: database table with uuid column, e.g. 'db_dbnode' - :return: list of tuples of (id, uuid) of rows with duplicate UUIDs - """ - from aiida.manage.manager import get_manager - backend = get_manager().get_backend() - query = f""" - SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {table}) - AS s WHERE c > 1 - """ - return backend.execute_raw(query) - - -def verify_uuid_uniqueness(table): - """Check whether database table contains rows with duplicate UUIDS. - - :param table: Database table with uuid column, e.g. 'db_dbnode' - :type str: - - :raises: IntegrityError if table contains rows with duplicate UUIDS. - """ - duplicates = _get_duplicate_uuids(table=table) - - if duplicates: - raise exceptions.IntegrityError( - 'Table {table:} contains rows with duplicate UUIDS: run ' - '`verdi database integrity detect-duplicate-uuid -t {table:}` to address the problem'.format(table=table) - ) - - -def _apply_new_uuid_mapping(table, mapping): - """Take a mapping of pks to UUIDs and apply it to the given table. - - :param table: database table with uuid column, e.g. 'db_dbnode' - :param mapping: dictionary of UUIDs mapped onto a pk - """ - from aiida.manage.manager import get_manager - backend = get_manager().get_backend() - for pk, uuid in mapping.items(): - query = f"""UPDATE {table} SET uuid = '{uuid}' WHERE id = {pk}""" - with backend.cursor() as cursor: - cursor.execute(query) - - -def deduplicate_uuids(table=None, dry_run=True): - """Detect and solve entities with duplicate UUIDs in a given database table. - - Before aiida-core v1.0.0, there was no uniqueness constraint on the UUID column of the node table in the database - and a few other tables as well. This made it possible to store multiple entities with identical UUIDs in the same - table without the database complaining. This bug was fixed in aiida-core=1.0.0 by putting an explicit uniqueness - constraint on UUIDs on the database level. However, this would leave databases created before this patch with - duplicate UUIDs in an inconsistent state. 
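# A standalone sketch of the duplicate detection described above, assuming a psycopg2
# connection to the profile database; the connection parameters are illustrative only:
import psycopg2

with psycopg2.connect(dbname='test_aiida', user='postgres', host='localhost') as conn:
    with conn.cursor() as cursor:
        # A window function counts how often each UUID occurs, in a single pass over the table
        cursor.execute(
            'SELECT s.id, s.uuid FROM '
            '(SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM db_dbnode) AS s WHERE c > 1'
        )
        duplicates = cursor.fetchall()  # [(id, uuid), ...] for every row sharing a UUID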
This command will run an analysis to detect duplicate UUIDs in a given - table and solve it by generating new UUIDs. Note that it will not delete or merge any rows. - - :return: list of strings denoting the performed operations - :raises ValueError: if the specified table is invalid - """ - from collections import defaultdict - import distutils.dir_util - - from aiida.common.utils import get_new_uuid - - from .utils import get_node_repository_sub_folder # pylint: disable=no-name-in-module - - if table not in TABLES_UUID_DEDUPLICATION: - raise ValueError(f"invalid table {table}: choose from {', '.join(TABLES_UUID_DEDUPLICATION)}") - - mapping = defaultdict(list) - - for pk, uuid in _get_duplicate_uuids(table=table): - mapping[uuid].append(int(pk)) - - messages = [] - mapping_new_uuid = {} - - for uuid, rows in mapping.items(): - - uuid_ref = None - - for pk in rows: - - # We don't have to change all rows that have the same UUID, the first one can keep the original - if uuid_ref is None: - uuid_ref = uuid - continue - - uuid_new = str(get_new_uuid()) - mapping_new_uuid[pk] = uuid_new - - if dry_run: - messages.append(f'would update UUID of {table} row<{pk}> from {uuid_ref} to {uuid_new}') - else: - messages.append(f'updated UUID of {table} row<{pk}> from {uuid_ref} to {uuid_new}') - dirpath_repo_ref = get_node_repository_sub_folder(uuid_ref) - dirpath_repo_new = get_node_repository_sub_folder(uuid_new) - - # First make sure the new repository exists, then copy the contents of the ref into the new. We use the - # somewhat unknown `distuitils.dir_util` method since that does just contents as we want. - os.makedirs(dirpath_repo_new, exist_ok=True) - distutils.dir_util.copy_tree(dirpath_repo_ref, dirpath_repo_new) - - if not dry_run: - _apply_new_uuid_mapping(table, mapping_new_uuid) - - if not messages: - messages = ['no duplicate UUIDs found'] - - return messages diff --git a/aiida/backends/manager.py b/aiida/backends/manager.py deleted file mode 100644 index 112f785a18..0000000000 --- a/aiida/backends/manager.py +++ /dev/null @@ -1,339 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Module for settings and utilities to determine and set the database schema versions.""" -import abc -import collections - -from aiida.common import exceptions - -SCHEMA_VERSION_KEY = 'db|schemaversion' -SCHEMA_VERSION_DESCRIPTION = 'Database schema version' - -SCHEMA_GENERATION_KEY = 'schema_generation' # The key to store the database schema generation in the settings table -SCHEMA_GENERATION_DESCRIPTION = 'Database schema generation' -SCHEMA_GENERATION_VALUE = '1' # The current schema generation - -# Mapping of schema generation onto a tuple of valid schema reset generation and `aiida-core` version number. Given the -# current code schema generation as the key, the first element of the tuple tells what schema generation the database -# should have to be able to reset the schema. 
If the generation of the database is correct, but the schema version of -# the database does not match the one required for the reset, it means the user first has to downgrade the `aiida-core` -# version and perform the latest migrations. The required version is provided by the tuples second element. -SCHEMA_GENERATION_RESET = { - '1': ('1', '1.*'), -} - -TEMPLATE_INVALID_SCHEMA_GENERATION = """ -Database schema generation `{schema_generation_database}` is incompatible with the required schema generation `{schema_generation_code}`. -To migrate the database schema generation to the current one, run the following command: - - verdi -p {profile_name} storage migrate -""" - -TEMPLATE_INVALID_SCHEMA_VERSION = """ -Database schema version `{schema_version_database}` is incompatible with the required schema version `{schema_version_code}`. -To migrate the database schema version to the current one, run the following command: - - verdi -p {profile_name} storage migrate -""" - -TEMPLATE_MIGRATE_SCHEMA_VERSION_INVALID_VERSION = """ -Cannot migrate the database version from `{schema_version_database}` to `{schema_version_code}`. -The database version is ahead of the version of the code and downgrades of the database are not supported. -""" - -TEMPLATE_MIGRATE_SCHEMA_GENERATION_INVALID_GENERATION = """ -Cannot migrate database schema generation from `{schema_generation_database}` to `{schema_generation_code}`. -This version of `aiida-core` can only migrate databases with schema generation `{schema_generation_reset}` -""" - -TEMPLATE_MIGRATE_SCHEMA_GENERATION_INVALID_VERSION = """ -Cannot migrate database schema generation from `{schema_generation_database}` to `{schema_generation_code}`. -The current database version is `{schema_version_database}` but `{schema_version_reset}` is required for generation migration. -First install `aiida-core~={aiida_core_version_reset}` and migrate the database to the latest version. -After the database schema is migrated to version `{schema_version_reset}` you can reinstall this version of `aiida-core` and migrate the schema generation. -""" - -REPOSITORY_UUID_KEY = 'repository|uuid' - -Setting = collections.namedtuple('Setting', ['key', 'value', 'description', 'time']) - - -class SettingsManager: - """Class to get, set and delete settings from the `DbSettings` table.""" - - @abc.abstractmethod - def get(self, key): - """Return the setting with the given key. - - :param key: the key identifying the setting - :return: Setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - - @abc.abstractmethod - def set(self, key, value, description=None): - """Return the settings with the given key. - - :param key: the key identifying the setting - :param value: the value for the setting - :param description: optional setting description - """ - - @abc.abstractmethod - def delete(self, key): - """Delete the setting with the given key. - - :param key: the key identifying the setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - - @abc.abstractmethod - def validate_table_existence(self): - """Verify that the `DbSetting` table actually exists. - - :raises: `~aiida.common.exceptions.NotExistent` if the settings table does not exist - """ - - -class BackendManager: - """Class to manage the database schema and environment.""" - - _settings_manager = None - - @abc.abstractmethod - def get_settings_manager(self): - """Return an instance of the `SettingsManager`. 
- - :return: `SettingsManager` - """ - - def load_backend_environment(self, profile, validate_schema=True, **kwargs): - """Load the backend environment. - - :param profile: the profile whose backend environment to load - :param validate_schema: boolean, if True, validate the schema first before loading the environment. - :param kwargs: keyword arguments that will be passed on to the backend specific scoped session getter function. - """ - self._load_backend_environment(**kwargs) - - if validate_schema: - self.validate_schema(profile) - - @abc.abstractmethod - def _load_backend_environment(self, **kwargs): - """Load the backend environment. - - :param kwargs: keyword arguments that will be passed on to the backend specific scoped session getter function. - """ - - @abc.abstractmethod - def reset_backend_environment(self): - """Reset the backend environment.""" - - def migrate(self): - """Migrate the database to the latest schema generation or version.""" - try: - # If the settings table does not exist, we are dealing with an empty database. We cannot perform the checks - # because they rely on the settings table existing, so instead we do not validate but directly call method - # `_migrate_database_version` which will perform the migration to create the initial schema. - self.get_settings_manager().validate_table_existence() - except exceptions.NotExistent: - self._migrate_database_version() - return - - if SCHEMA_GENERATION_VALUE != self.get_schema_generation_database(): - self.validate_schema_generation_for_migration() - self._migrate_database_generation() - - if self.get_schema_version_head() != self.get_schema_version_backend(): - self.validate_schema_version_for_migration() - self._migrate_database_version() - - def _migrate_database_generation(self): - """Migrate the database schema generation. - - .. warning:: this should NEVER be called directly because there is no validation performed on whether the - current database schema generation and version can actually be migrated. - - This normally just consists out of setting the schema generation value, but depending on the backend more might - be needed. In that case, this method should be overridden and call `super` first, followed by the additional - logic that is required. - """ - self.set_schema_generation_database(SCHEMA_GENERATION_VALUE) - self.set_schema_version_backend(self.get_schema_version_head()) - - def _migrate_database_version(self): - """Migrate the database to the current schema version. - - .. warning:: this should NEVER be called directly because there is no validation performed on whether the - current database schema generation and version can actually be migrated. - """ - - @abc.abstractmethod - def is_database_schema_ahead(self): - """Determine whether the backend schema version is ahead of the head schema version. - - .. warning:: this will not check whether the schema generations are equal - - :return: boolean, True if the backend schema version is ahead of the head schema version. - """ - - @abc.abstractmethod - def get_schema_version_head(self) -> str: - """Return the head schema version for this backend, i.e. the latest schema this backend can be migrated to""" - - @abc.abstractmethod - def get_schema_version_reset(self, schema_generation_code): - """Return schema version the database should have to be able to automatically reset to code schema generation. - - :param schema_generation_code: the schema generation of the code. 
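# A sketch of the round-trip the abstract `SettingsManager` interface above promises; `manager`
# stands for any concrete implementation and the stored key and value are hypothetical:
def settings_roundtrip(manager):
    manager.set('some|setting', '0.42', description='An example setting')
    setting = manager.get('some|setting')  # a `Setting` namedtuple
    assert (setting.key, setting.value) == ('some|setting', '0.42')
    manager.delete('some|setting')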
- :return: schema version - """ - - @abc.abstractmethod - def get_schema_version_backend(self) -> str: - """Return the schema version of the currently configured backend instance.""" - - @abc.abstractmethod - def set_schema_version_backend(self, version: str) -> None: - """Set the database schema version. - - :param version: string with schema version to set - """ - - def get_schema_generation_database(self): - """Return the database schema generation. - - :return: `distutils.version.LooseVersion` with schema generation of the database - """ - try: - setting = self.get_settings_manager().get(SCHEMA_GENERATION_KEY) - return setting.value - except exceptions.NotExistent: - return '1' - - def set_schema_generation_database(self, generation): - """Set the database schema generation. - - :param generation: string with schema generation to set - """ - self.get_settings_manager().set(SCHEMA_GENERATION_KEY, generation) - - def set_repository_uuid(self, uuid): - """Set the UUID of the repository that is associated with this database. - - :param uuid: the UUID of the repository associated with this database. - """ - self.get_settings_manager().set(REPOSITORY_UUID_KEY, uuid, description='Repository UUID') - - def get_repository_uuid(self): - """Return the UUID of the repository that is associated with this database. - - :return: the UUID of the repository associated with this database or None if it doesn't exist. - """ - try: - setting = self.get_settings_manager().get(REPOSITORY_UUID_KEY) - return setting.value - except exceptions.NotExistent: - return None - - def validate_schema(self, profile): - """Validate that the current database generation and schema are up-to-date with that of the code. - - :param profile: the profile for which to validate the database schema - :raises `aiida.common.exceptions.ConfigurationError`: if database schema version or generation is not up-to-date - """ - self.validate_schema_generation(profile) - self.validate_schema_version(profile) - - def validate_schema_generation_for_migration(self): - """Validate whether the current database schema generation can be migrated. - - :raises `aiida.common.exceptions.IncompatibleDatabaseSchema`: if database schema generation cannot be migrated - """ - schema_generation_code = SCHEMA_GENERATION_VALUE - schema_generation_database = self.get_schema_generation_database() - schema_version_database = self.get_schema_version_backend() - schema_version_reset = self.get_schema_version_reset(schema_generation_code) - schema_generation_reset, aiida_core_version_reset = SCHEMA_GENERATION_RESET[schema_generation_code] - - if schema_generation_database != schema_generation_reset: - raise exceptions.IncompatibleDatabaseSchema( - TEMPLATE_MIGRATE_SCHEMA_GENERATION_INVALID_GENERATION.format( - schema_generation_database=schema_generation_database, - schema_generation_code=schema_generation_code, - schema_generation_reset=schema_generation_reset - ) - ) - - if schema_version_database != schema_version_reset: - raise exceptions.IncompatibleDatabaseSchema( - TEMPLATE_MIGRATE_SCHEMA_GENERATION_INVALID_VERSION.format( - schema_generation_database=schema_generation_database, - schema_generation_code=schema_generation_code, - schema_version_database=schema_version_database, - schema_version_reset=schema_version_reset, - aiida_core_version_reset=aiida_core_version_reset - ) - ) - - def validate_schema_version_for_migration(self): - """Validate whether the current database schema version can be migrated. - - .. 
warning:: this will not validate that the schema generation is correct. - - :raises `aiida.common.exceptions.IncompatibleDatabaseSchema`: if database schema version cannot be migrated - """ - schema_version_code = self.get_schema_version_head() - schema_version_database = self.get_schema_version_backend() - - if self.is_database_schema_ahead(): - # Database is newer than the code so a downgrade would be necessary but this is not supported. - raise exceptions.IncompatibleDatabaseSchema( - TEMPLATE_MIGRATE_SCHEMA_VERSION_INVALID_VERSION.format( - schema_version_database=schema_version_database, - schema_version_code=schema_version_code, - ) - ) - - def validate_schema_generation(self, profile): - """Validate that the current database schema generation is up-to-date with that of the code. - - :raises `aiida.common.exceptions.IncompatibleDatabaseSchema`: if database schema generation is not up-to-date - """ - schema_generation_code = SCHEMA_GENERATION_VALUE - schema_generation_database = self.get_schema_generation_database() - - if schema_generation_database != schema_generation_code: - raise exceptions.IncompatibleDatabaseSchema( - TEMPLATE_INVALID_SCHEMA_GENERATION.format( - schema_generation_database=schema_generation_database, - schema_generation_code=schema_generation_code, - profile_name=profile.name, - ) - ) - - def validate_schema_version(self, profile): - """Validate that the current database schema version is up-to-date with that of the code. - - :param profile: the profile for which to validate the database schema - :raises `aiida.common.exceptions.IncompatibleDatabaseSchema`: if database schema version is not up-to-date - """ - schema_version_code = self.get_schema_version_head() - schema_version_database = self.get_schema_version_backend() - - if schema_version_database != schema_version_code: - raise exceptions.IncompatibleDatabaseSchema( - TEMPLATE_INVALID_SCHEMA_VERSION.format( - schema_version_database=schema_version_database, - schema_version_code=schema_version_code, - profile_name=profile.name - ) - ) diff --git a/aiida/backends/managers/__init__.py b/aiida/backends/managers/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/aiida/backends/managers/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/aiida/backends/sqlalchemy/__init__.py b/aiida/backends/sqlalchemy/__init__.py index 232346800d..e03db5e246 100644 --- a/aiida/backends/sqlalchemy/__init__.py +++ b/aiida/backends/sqlalchemy/__init__.py @@ -7,53 +7,4 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=global-statement -"""Module with implementation of the database backend using SqlAlchemy.""" -from aiida.backends.utils import create_scoped_session_factory, create_sqlalchemy_engine - -ENGINE = None -SESSION_FACTORY = None - - -def reset_session(): - """Reset the session which means setting the global engine and session factory instances to `None`.""" - global ENGINE - global SESSION_FACTORY - - if ENGINE is not None: - ENGINE.dispose() - - if SESSION_FACTORY is not None: - SESSION_FACTORY.expunge_all() # pylint: disable=no-member - SESSION_FACTORY.close() # pylint: disable=no-member - - ENGINE = None - SESSION_FACTORY = None - - -def get_scoped_session(**kwargs): - """Return a scoped session - - According to SQLAlchemy docs, this returns always the same object within a thread, and a different object in a - different thread. Moreover, since we update the session class upon forking, different session objects will be used. - - :param kwargs: keyword argument that will be passed on to :py:func:`aiida.backends.utils.create_sqlalchemy_engine`, - opening the possibility to change QueuePool time outs and more. - See https://docs.sqlalchemy.org/en/13/core/engines.html?highlight=create_engine#sqlalchemy.create_engine for - more info. - """ - from aiida.manage.configuration import get_profile - - global ENGINE - global SESSION_FACTORY - - if SESSION_FACTORY is not None: - session = SESSION_FACTORY() - return session - - if ENGINE is None: - ENGINE = create_sqlalchemy_engine(get_profile(), **kwargs) - - SESSION_FACTORY = create_scoped_session_factory(ENGINE, expire_on_commit=True) - - return SESSION_FACTORY() +"""Module with implementation of the storage backend using SqlAlchemy and the disk-objectstore.""" diff --git a/aiida/backends/sqlalchemy/alembic_cli.py b/aiida/backends/sqlalchemy/alembic_cli.py new file mode 100755 index 0000000000..7b9b1edf24 --- /dev/null +++ b/aiida/backends/sqlalchemy/alembic_cli.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
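# For context, a sketch of how the module-global session removed above used to be obtained;
# after this change the assumption is that sessions are managed per storage instance instead:
from aiida.backends.sqlalchemy import get_scoped_session  # as it existed before this diff

session = get_scoped_session()  # same object within a thread, a different one per thread
session.close()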
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Simple wrapper around the alembic command line tool that first loads an AiiDA profile.""" +import alembic +import click +from sqlalchemy.util.compat import nullcontext + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.cmdline import is_verbose +from aiida.cmdline.params import options + + +class AlembicRunner: + """Wrapper around the alembic command line tool that first loads an AiiDA profile.""" + + def __init__(self) -> None: + self.profile = None + + def execute_alembic_command(self, command_name, connect=True, **kwargs): + """Execute an Alembic CLI command. + + :param command_name: the sub command name + :param kwargs: parameters to pass to the command + """ + if self.profile is None: + raise click.ClickException('No profile specified') + migrator = PsqlDostoreMigrator(self.profile) + + context = migrator._alembic_connect() if connect else nullcontext(migrator._alembic_config()) # pylint: disable=protected-access + with context as config: + command = getattr(alembic.command, command_name) + config.stdout = click.get_text_stream('stdout') + command(config, **kwargs) + + +pass_runner = click.make_pass_decorator(AlembicRunner, ensure=True) + + +@click.group() +@options.PROFILE(required=True) +@pass_runner +def alembic_cli(runner, profile): + """Simple wrapper around the alembic command line tool that first loads an AiiDA profile.""" + runner.profile = profile + + +@alembic_cli.command('revision') +@click.argument('message') +@pass_runner +def alembic_revision(runner, message): + """Create a new database revision.""" + # to-do this does not currently work, because `alembic.RevisionContext._run_environment` has issues with heads + # (it works if we comment out the initial autogenerate check) + runner.execute_alembic_command('revision', message=message, autogenerate=True, head='main@head') + + +@alembic_cli.command('current') +@options.VERBOSITY() +@pass_runner +def alembic_current(runner): + """Show the current revision.""" + runner.execute_alembic_command('current', verbose=is_verbose()) + + +@alembic_cli.command('history') +@click.option('-r', '--rev-range') +@options.VERBOSITY() +@pass_runner +def alembic_history(runner, rev_range): + """Show the history for the given revision range.""" + runner.execute_alembic_command('history', connect=False, rev_range=rev_range, verbose=is_verbose()) + + +@alembic_cli.command('show') +@click.argument('revision', type=click.STRING) +@pass_runner +def alembic_show(runner, revision): + """Show details of the given REVISION.""" + runner.execute_alembic_command('show', rev=revision) + + +@alembic_cli.command('upgrade') +@click.argument('revision', type=click.STRING) +@pass_runner +def alembic_upgrade(runner, revision): + """Upgrade the database to the given REVISION.""" + runner.execute_alembic_command('upgrade', revision=revision) + + +@alembic_cli.command('downgrade') +@click.argument('revision', type=click.STRING) +@pass_runner +def alembic_downgrade(runner, revision): + """Downgrade the database to the given REVISION.""" + runner.execute_alembic_command('downgrade', revision=revision) + + +if __name__ == '__main__': + alembic_cli() # pylint: disable=no-value-for-parameter diff --git a/aiida/backends/sqlalchemy/manage.py 
b/aiida/backends/sqlalchemy/manage.py deleted file mode 100755 index 1538a1b9e1..0000000000 --- a/aiida/backends/sqlalchemy/manage.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Simple wrapper around the alembic command line tool that first loads an AiiDA profile.""" -import alembic -import click - -from aiida.cmdline import is_verbose -from aiida.cmdline.params import options - - -def execute_alembic_command(command_name, **kwargs): - """Execute an Alembic CLI command. - - :param command_name: the sub command name - :param kwargs: parameters to pass to the command - """ - from aiida.backends.sqlalchemy.manager import SqlaBackendManager - - manager = SqlaBackendManager() - - with manager.alembic_config() as config: - command = getattr(alembic.command, command_name) - command(config, **kwargs) - - -@click.group() -@options.PROFILE(required=True) -def alembic_cli(profile): - """Simple wrapper around the alembic command line tool that first loads an AiiDA profile.""" - from aiida.manage.configuration import load_profile - from aiida.manage.manager import get_manager - - load_profile(profile=profile.name) - manager = get_manager() - manager._load_backend(schema_check=False) # pylint: disable=protected-access - - -@alembic_cli.command('revision') -@click.argument('message') -def alembic_revision(message): - """Create a new database revision.""" - execute_alembic_command('revision', message=message, autogenerate=True) - - -@alembic_cli.command('current') -@options.VERBOSITY() -def alembic_current(): - """Show the current revision.""" - execute_alembic_command('current', verbose=is_verbose()) - - -@alembic_cli.command('history') -@click.option('-r', '--rev-range') -@options.VERBOSITY() -def alembic_history(rev_range): - """Show the history for the given revision range.""" - execute_alembic_command('history', rev_range=rev_range, verbose=is_verbose()) - - -@alembic_cli.command('upgrade') -@click.argument('revision', type=click.STRING) -def alembic_upgrade(revision): - """Upgrade the database to the given REVISION.""" - execute_alembic_command('upgrade', revision=revision) - - -@alembic_cli.command('downgrade') -@click.argument('revision', type=click.STRING) -def alembic_downgrade(revision): - """Downgrade the database to the given REVISION.""" - execute_alembic_command('downgrade', revision=revision) - - -if __name__ == '__main__': - alembic_cli() # pylint: disable=no-value-for-parameter diff --git a/aiida/backends/sqlalchemy/manager.py b/aiida/backends/sqlalchemy/manager.py deleted file mode 100644 index a49cd014ac..0000000000 --- a/aiida/backends/sqlalchemy/manager.py +++ /dev/null @@ -1,234 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
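# A hypothetical way to exercise the new `alembic_cli` wrapper above without a shell, using
# click's test runner; the profile name is an assumption (any configured profile works):
from click.testing import CliRunner

from aiida.backends.sqlalchemy.alembic_cli import alembic_cli

result = CliRunner().invoke(alembic_cli, ['-p', 'test_aiida', 'history', '--rev-range', '-5:'])
print(result.output)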
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Utilities and configuration of the SqlAlchemy database schema.""" -import contextlib -import os - -from alembic.command import downgrade, upgrade -import sqlalchemy -from sqlalchemy.orm.exc import NoResultFound - -from aiida.backends.sqlalchemy import get_scoped_session -from aiida.common import NotExistent - -from ..manager import SCHEMA_GENERATION_VALUE, BackendManager, Setting, SettingsManager - -ALEMBIC_REL_PATH = 'migrations' - -# The database schema version required to perform schema reset for a given code schema generation -SCHEMA_VERSION_RESET = {'1': None} - - -class SqlaBackendManager(BackendManager): - """Class to manage the database schema.""" - - @staticmethod - @contextlib.contextmanager - def alembic_config(start_transaction=True): - """Context manager to return an instance of an Alembic configuration. - - The current database connection is added in the `attributes` property, through which it can then also be - retrieved, also in the `env.py` file, which is run when the database is migrated. - """ - from alembic.config import Config - - from . import ENGINE - - # Certain migrations, such as altering tables, require that there is no existing transactions - # locking the tables. - # Presently, ``SqlaSettingsManager.get`` has been found to leave idle transactions, - # and so we need to ensure that they are closed. - transaction = get_scoped_session().get_transaction() - if transaction: - transaction.close() - - engine_context = ENGINE.begin if start_transaction else ENGINE.connect - with engine_context() as connection: - dir_path = os.path.dirname(os.path.realpath(__file__)) - config = Config() - config.set_main_option('script_location', os.path.join(dir_path, ALEMBIC_REL_PATH)) - config.attributes['connection'] = connection # pylint: disable=unsupported-assignment-operation - yield config - - @contextlib.contextmanager - def alembic_script(self): - """Context manager to return an instance of an Alembic `ScriptDirectory`.""" - from alembic.script import ScriptDirectory - - with self.alembic_config() as config: - yield ScriptDirectory.from_config(config) - - @contextlib.contextmanager - def migration_context(self): - """Context manager to return an instance of an Alembic migration context. - - This migration context will have been configured with the current database connection, which allows this context - to be used to inspect the contents of the database, such as the current revision. - """ - from alembic.runtime.environment import EnvironmentContext - from alembic.script import ScriptDirectory - - with self.alembic_config() as config: - script = ScriptDirectory.from_config(config) - with EnvironmentContext(config, script) as context: - context.configure(context.config.attributes['connection']) - yield context.get_context() - - def get_settings_manager(self): - """Return an instance of the `SettingsManager`. - - :return: `SettingsManager` - """ - if self._settings_manager is None: - self._settings_manager = SqlaSettingsManager() - - return self._settings_manager - - def _load_backend_environment(self, **kwargs): - """Load the backend environment. - - :param kwargs: keyword arguments that will be passed on to - :py:func:`aiida.backends.sqlalchemy.get_scoped_session`. 
- """ - get_scoped_session(**kwargs) - - def reset_backend_environment(self): - """Reset the backend environment.""" - from . import reset_session - reset_session() - - def list_schema_versions(self): - """List all available schema versions (oldest to latest). - - :return: list of strings with schema versions - """ - with self.alembic_script() as script: - return list(reversed([entry.revision for entry in script.walk_revisions()])) - - def is_database_schema_ahead(self): - """Determine whether the database schema version is ahead of the code schema version. - - .. warning:: this will not check whether the schema generations are equal - - :return: boolean, True if the database schema version is ahead of the code schema version. - """ - with self.alembic_script() as script: - return self.get_schema_version_backend() not in [entry.revision for entry in script.walk_revisions()] - - def get_schema_version_head(self): - with self.alembic_script() as script: - return script.get_current_head() - - def get_schema_version_reset(self, schema_generation_code): - """Return schema version the database should have to be able to automatically reset to code schema generation. - - :param schema_generation_code: the schema generation of the code. - :return: schema version - """ - return SCHEMA_VERSION_RESET[schema_generation_code] - - def get_schema_version_backend(self): - with self.migration_context() as context: - return context.get_current_revision() - - def set_schema_version_backend(self, version: str) -> None: - with self.migration_context() as context: - return context.stamp(context.script, version) - - def _migrate_database_generation(self): - self.set_schema_generation_database(SCHEMA_GENERATION_VALUE) - self.set_schema_version_backend('head') - - def migrate_up(self, version: str): - """Migrate the database up to a specific version. - - :param version: string with schema version to migrate to - """ - with self.alembic_config(start_transaction=False) as config: - upgrade(config, version) - - def migrate_down(self, version: str): - """Migrate the database down to a specific version. - - :param version: string with schema version to migrate to - """ - with self.alembic_config(start_transaction=False) as config: - downgrade(config, version) - - def _migrate_database_version(self): - """Migrate the database to the latest schema version.""" - super()._migrate_database_version() - self.migrate_up('head') - - -class SqlaSettingsManager(SettingsManager): - """Class to get, set and delete settings from the `DbSettings` table.""" - - table_name = 'db_dbsetting' - - def validate_table_existence(self): - """Verify that the `DbSetting` table actually exists. - - :raises: `~aiida.common.exceptions.NotExistent` if the settings table does not exist - """ - inspector = sqlalchemy.inspect(get_scoped_session().bind) - if self.table_name not in inspector.get_table_names(): - raise NotExistent('the settings table does not exist') - - def get(self, key): - """Return the setting with the given key. 
- - :param key: the key identifying the setting - :return: Setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - from aiida.backends.sqlalchemy.models.settings import DbSetting - self.validate_table_existence() - - try: - setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() - except NoResultFound: - raise NotExistent(f'setting `{key}` does not exist') from NoResultFound - - return Setting(key, setting.getvalue(), setting.description, setting.time) - - def set(self, key, value, description=None): - """Return the settings with the given key. - - :param key: the key identifying the setting - :param value: the value for the setting - :param description: optional setting description - """ - from aiida.backends.sqlalchemy.models.settings import DbSetting - from aiida.orm.implementation.utils import validate_attribute_extra_key - - self.validate_table_existence() - validate_attribute_extra_key(key) - - other_attribs = {} - if description is not None: - other_attribs['description'] = description - - DbSetting.set_value(key, value, other_attribs=other_attribs) - - def delete(self, key): - """Delete the setting with the given key. - - :param key: the key identifying the setting - :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist - """ - from aiida.backends.sqlalchemy.models.settings import DbSetting - self.validate_table_existence() - - try: - setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() - setting.delete() - except NoResultFound: - raise NotExistent(f'setting `{key}` does not exist') from NoResultFound diff --git a/aiida/backends/sqlalchemy/migrations/env.py b/aiida/backends/sqlalchemy/migrations/env.py index e34c61c75c..b15509162f 100644 --- a/aiida/backends/sqlalchemy/migrations/env.py +++ b/aiida/backends/sqlalchemy/migrations/env.py @@ -31,15 +31,22 @@ def run_migrations_online(): config = context.config # pylint: disable=no-member connection = config.attributes.get('connection', None) + aiida_profile = config.attributes.get('aiida_profile', None) + on_version_apply = config.attributes.get('on_version_apply', None) if connection is None: from aiida.common.exceptions import ConfigurationError raise ConfigurationError('An initialized connection is expected for the AiiDA online migrations.') + if aiida_profile is None: + from aiida.common.exceptions import ConfigurationError + raise ConfigurationError('An aiida_profile is expected for the AiiDA online migrations.') context.configure( # pylint: disable=no-member connection=connection, target_metadata=Base.metadata, transaction_per_migration=True, + aiida_profile=aiida_profile, + on_version_apply=on_version_apply ) context.run_migrations() # pylint: disable=no-member diff --git a/aiida/backends/general/migrations/__init__.py b/aiida/backends/sqlalchemy/migrations/utils/__init__.py similarity index 88% rename from aiida/backends/general/migrations/__init__.py rename to aiida/backends/sqlalchemy/migrations/utils/__init__.py index 2776a55f97..5350388b1a 100644 --- a/aiida/backends/general/migrations/__init__.py +++ b/aiida/backends/sqlalchemy/migrations/utils/__init__.py @@ -7,3 +7,5 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +"""Utilities to perform the migrations.""" +from .reflect import ReflectMigrations diff --git a/aiida/backends/general/migrations/calc_state.py 
b/aiida/backends/sqlalchemy/migrations/utils/calc_state.py
similarity index 100%
rename from aiida/backends/general/migrations/calc_state.py
rename to aiida/backends/sqlalchemy/migrations/utils/calc_state.py
diff --git a/aiida/backends/sqlalchemy/migrations/utils/create_dbattribute.py b/aiida/backends/sqlalchemy/migrations/utils/create_dbattribute.py
new file mode 100644
index 0000000000..54ce1dac8b
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/create_dbattribute.py
@@ -0,0 +1,113 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved. #
+# This file is part of the AiiDA code. #
+# #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file #
+# For further information please visit http://www.aiida.net #
+###########################################################################
+"""Create an old style node attribute/extra, via the `db_dbattribute`/`db_dbextra` tables.
+
+Adapted from: `aiida/backends/djsite/db/migrations/__init__.py`
+"""
+from __future__ import annotations
+
+import datetime
+
+from aiida.common import json
+from aiida.common.exceptions import ValidationError
+from aiida.common.timezone import get_current_timezone, is_naive, make_aware
+
+
+def create_rows(key: str, value, node_id: int) -> list[dict]:  # pylint: disable=too-many-branches
+    """Create an old style node attribute/extra, via the `db_dbattribute`/`db_dbextra` tables.
+
+    :note: no database queries are performed; in particular, no check is done
+        on the existence of the given node.
+
+    :param key: a string with the key to create (it can contain the
+        separator `.` if this is a sub-attribute: indeed, this
+        function calls itself recursively)
+    :param value: the value to store (a basic data type or a list or a dict)
+    :param node_id: the id of the node for which to store the attribute/extra
+
+    :return: a list of column name -> value dictionaries, with which to instantiate database rows
+    """
+    list_to_return = []
+
+    columns = {
+        'key': key,
+        'dbnode_id': node_id,
+        'datatype': 'none',
+        'tval': '',
+        'bval': None,
+        'ival': None,
+        'fval': None,
+        'dval': None,
+    }
+
+    if isinstance(value, bool):
+        columns['datatype'] = 'bool'
+        columns['bval'] = value
+
+    elif isinstance(value, int):
+        columns['datatype'] = 'int'
+        columns['ival'] = value
+
+    elif isinstance(value, float):
+        columns['datatype'] = 'float'
+        columns['fval'] = value
+        columns['tval'] = ''
+
+    elif isinstance(value, str):
+        columns['datatype'] = 'txt'
+        columns['tval'] = value
+
+    elif isinstance(value, datetime.datetime):
+
+        columns['datatype'] = 'date'
+        # For time-aware and time-naive datetime objects, see
+        # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#naive-and-aware-datetime-objects
+        columns['dval'] = make_aware(value, get_current_timezone()) if is_naive(value) else value
+
+    elif isinstance(value, (list, tuple)):
+
+        columns['datatype'] = 'list'
+        columns['ival'] = len(value)
+
+        # Recursively create one row per element, encoding the element index in the key
+        for i, subv in enumerate(value):
+            list_to_return.extend(create_rows(f'{key}.{i:d}', subv, node_id))
+
+    elif isinstance(value, dict):
+
+        columns['datatype'] = 'dict'
+        columns['ival'] = len(value)
+
+        for subk, subv in value.items():
+            # Validate the sub-key, not the already validated parent key: the parent key of any
+            # recursive call legitimately contains the separator, so checking it would reject
+            # every nested dictionary
+            if not isinstance(subk, str) or not subk:
+                raise ValidationError('The key must be a non-empty string.')
+            if '.' in subk:
+                raise ValidationError(
+                    "The separator symbol '.' cannot be present in the key of attributes, extras, etc."
+                )
+            list_to_return.extend(create_rows(f'{key}.{subk}', subv, node_id))
+    else:
+        try:
+            jsondata = json.dumps(value)
+        except TypeError:
+            raise ValueError(
+                f'Unable to store the value: it must be either a basic datatype, or json-serializable: {value}'
+            ) from TypeError
+
+        columns['datatype'] = 'json'
+        columns['tval'] = jsondata
+
+    # create attr row and add to list_to_return
+    list_to_return.append(columns)
+
+    return list_to_return
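# A worked example for `create_rows` above, with a hypothetical node id: nested values are
# flattened into one row per dotted key, children first and the containing entry last:
rows = create_rows('opts', {'unit': 'eV', 'values': [1, 2]}, node_id=42)
assert [row['key'] for row in rows] == ['opts.unit', 'opts.values.0', 'opts.values.1', 'opts.values', 'opts']
assert rows[-1]['datatype'] == 'dict' and rows[-1]['ival'] == 2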
diff --git a/aiida/backends/sqlalchemy/migrations/utils/dblog_update.py b/aiida/backends/sqlalchemy/migrations/utils/dblog_update.py
new file mode 100644
index 0000000000..5084cfea6f
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/dblog_update.py
@@ -0,0 +1,246 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved. #
+# This file is part of the AiiDA code. #
+# #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file #
+# For further information please visit http://www.aiida.net #
+###########################################################################
+"""Shared functions for the django_0024 and sqlalchemy ea2f50e7f615 migrations."""
+import sys
+from tempfile import NamedTemporaryFile
+
+import click
+import sqlalchemy as sa
+
+from aiida.cmdline.utils import echo
+
+from .utils import dumps_json
+
+
+def get_legacy_workflow_log_number(connection):
+    """ Get the number of the log records that correspond to legacy workflows """
+    return connection.execute(
+        sa.text(
+            """
+            SELECT COUNT(*) FROM db_dblog
+            WHERE
+                (db_dblog.objname LIKE 'aiida.workflows.user.%')
+            """
+        )
+    ).fetchall()[0][0]
+
+
+def get_unknown_entity_log_number(connection):
+    """ Get the number of the log records that correspond to unknown entities """
+    return connection.execute(
+        sa.text(
+            """
+            SELECT COUNT(*) FROM db_dblog
+            WHERE
+                (db_dblog.objname NOT LIKE 'node.%') AND
+                (db_dblog.objname NOT LIKE 'aiida.workflows.user.%')
+            """
+        )
+    ).fetchall()[0][0]
+
+
+def get_logs_with_no_nodes_number(connection):
+    """ Get the number of the log records that correspond to nodes that were deleted """
+    return connection.execute(
+        sa.text(
+            """
+            SELECT COUNT(*) FROM db_dblog
+            WHERE
+                (db_dblog.objname LIKE 'node.%') AND NOT EXISTS
+                (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1)
+            """
+        )
+    ).fetchall()[0][0]
+
+
+def get_serialized_legacy_workflow_logs(connection):
+    """ Get the serialized log records that correspond to legacy workflows """
+    query = connection.execute(
+        sa.text(
+            """
+            SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname,
+            db_dblog.message, db_dblog.metadata FROM db_dblog
+            WHERE
+                (db_dblog.objname LIKE 'aiida.workflows.user.%')
+            """
+        )
+    )
+    res = []
+    for row in query:
+        res.append(row._asdict())
+    return dumps_json(res)
+
+
+def get_serialized_unknown_entity_logs(connection):
+    """ Get the serialized log records that correspond to unknown entities """
+    query = connection.execute(
+        sa.text(
+            """
+            SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname,
+            db_dblog.message, db_dblog.metadata FROM db_dblog
+            WHERE
+                (db_dblog.objname NOT LIKE 'node.%') AND
+                (db_dblog.objname NOT LIKE 'aiida.workflows.user.%')
+            """
+        )
+    )
+    res = []
+    for row in query:
+        res.append(row._asdict())
+    return dumps_json(res)
+
+
+def get_serialized_logs_with_no_nodes(connection):
+    """ Get the serialized log records that correspond to nodes that were deleted """
+    query = connection.execute(
+        sa.text(
+            """
+            SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname,
+            db_dblog.message, db_dblog.metadata FROM db_dblog
+            WHERE
+                (db_dblog.objname LIKE 'node.%') AND NOT EXISTS
+                (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1)
+            """
+        )
+    )
+    res = []
+    for row in query:
+        res.append(row._asdict())
+    return dumps_json(res)
+
+
+def export_and_clean_workflow_logs(connection, profile):
+    """Export the log records that correspond to legacy workflows and to unknown entities
+    (write them to files and remove them from the DbLog table).
+    """
+    lwf_no_number = get_legacy_workflow_log_number(connection)
+    other_number = get_unknown_entity_log_number(connection)
+    log_no_node_number = get_logs_with_no_nodes_number(connection)
+
+    # If there are no legacy workflow log records or log records of an unknown entity
+    if lwf_no_number == 0 and other_number == 0 and log_no_node_number == 0:
+        return
+
+    if not profile.is_test_profile:
+        echo.echo_warning(
+            'We found {} log records that correspond to legacy workflows and {} log records that correspond '
+            'to an unknown entity.'.format(lwf_no_number, other_number)
+        )
+        echo.echo_warning(
+            'These records will be removed from the database and exported to JSON files (to the current directory).'
+        )
+        proceed = click.confirm('Would you like to proceed?', default=True)
+        if not proceed:
+            sys.exit(1)
+
+    delete_on_close = profile.is_test_profile
+
+    # Exporting the legacy workflow log records
+    if lwf_no_number != 0:
+
+        # Get the records and write them to file
+        with NamedTemporaryFile(
+            prefix='legacy_wf_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+'
+        ) as handle:
+            # Export the log records
+            filename = handle.name
+            handle.write(get_serialized_legacy_workflow_logs(connection))
+
+            # If delete_on_close is False, we are running for the user and add additional message of file location
+            if not delete_on_close:
+                echo.echo(f'Exported legacy workflow logs to {filename}')
+
+            # Now delete the records
+            connection.execute(
+                sa.text(
+                    """
+                    DELETE FROM db_dblog
+                    WHERE
+                        (db_dblog.objname LIKE 'aiida.workflows.user.%')
+                    """
+                )
+            )
+
+    # Exporting unknown log records
+    if other_number != 0:
+        # Get the records and write them to file
+        with NamedTemporaryFile(
+            prefix='unknown_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+'
+        ) as handle:
+            # Export the log records
+            filename = handle.name
+            handle.write(get_serialized_unknown_entity_logs(connection))
+
+            # If delete_on_close is False, we are running for the user and add additional message of file location
+            if not delete_on_close:
+                echo.echo(f'Exported unknown entity logs to {filename}')
+
+            # Now delete the records
+            connection.execute(
+                sa.text(
+                    """
+                    DELETE FROM db_dblog WHERE
+                        (db_dblog.objname NOT LIKE 'node.%') AND
+                        (db_dblog.objname NOT LIKE 'aiida.workflows.user.%')
+                    """
+                )
+            )
+
+    # Exporting log records that don't correspond to nodes
+    if log_no_node_number != 0:
+        # Get the records and write them to file
+        with NamedTemporaryFile(
+            prefix='no_node_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+'
+        ) as handle:
+            # Export the log records
+
filename = handle.name + handle.write(get_serialized_logs_with_no_nodes(connection)) + + # If delete_on_close is False, we are running for the user and add additional message of file location + if not delete_on_close: + echo.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename)) + + # Now delete the records + connection.execute( + sa.text( + """ + DELETE FROM db_dblog WHERE + (db_dblog.objname LIKE 'node.%') AND NOT EXISTS + (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) + """ + ) + ) + + +def set_new_uuid(connection): + """ Set new and distinct UUIDs to all the logs """ + from aiida.common.utils import get_new_uuid + + # Exit if there are no rows - e.g. initial setup + id_query = connection.execute(sa.text('SELECT db_dblog.id FROM db_dblog')) + if id_query.rowcount == 0: + return + + id_res = id_query.fetchall() + ids = [] + for (curr_id,) in id_res: + ids.append(curr_id) + uuids = set() + while len(uuids) < len(ids): + uuids.add(get_new_uuid()) + + # Create the key/value pairs + key_values = ','.join(f"({curr_id}, '{curr_uuid}')" for curr_id, curr_uuid in zip(ids, uuids)) + + update_stm = f""" + UPDATE db_dblog as t SET + uuid = uuid(c.uuid) + from (values {key_values}) as c(id, uuid) where c.id = t.id""" + connection.execute(sa.text(update_stm)) diff --git a/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py b/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py new file mode 100644 index 0000000000..827b556a86 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
diff --git a/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py b/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py
new file mode 100644
index 0000000000..827b556a86
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/duplicate_uuids.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Utilities for detecting and handling rows with duplicate UUIDs in the database tables."""
+from sqlalchemy import text
+
+from aiida.common import exceptions
+
+TABLES_UUID_DEDUPLICATION = ('db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbnode')
+
+
+def _get_duplicate_uuids(table: str, connection):
+    """Retrieve the rows of the given database table that contain duplicate UUIDs."""
+    return connection.execute(
+        text(
+            f"""
+            SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {table})
+            AS s WHERE c > 1
+            """
+        )
+    )
+
+
+def verify_uuid_uniqueness(table: str, connection):
+    """Check whether the database table contains rows with duplicate UUIDs and raise if it does."""
+    duplicates = _get_duplicate_uuids(table=table, connection=connection)
+    if duplicates.rowcount > 0:
+        raise exceptions.IntegrityError(f'Table {table} contains rows with duplicate UUIDs')
diff --git a/aiida/manage/database/integrity/plugins.py b/aiida/backends/sqlalchemy/migrations/utils/integrity.py
similarity index 78%
rename from aiida/manage/database/integrity/plugins.py
rename to aiida/backends/sqlalchemy/migrations/utils/integrity.py
index 4fc48c8ccd..f51f48163e 100644
--- a/aiida/manage/database/integrity/plugins.py
+++ b/aiida/backends/sqlalchemy/migrations/utils/integrity.py
@@ -8,7 +8,8 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 # pylint: disable=invalid-name
-"""Generic functions to verify the integrity of the database and optionally apply patches to fix problems."""
+"""Methods to validate the database integrity and fix violations."""
+WARNING_BORDER = '*' * 120
 
 # These are all the entry points from the `aiida.calculations` category as registered with the AiiDA registry
 # on Tuesday December 4 at 13:00:00 UTC
@@ -142,3 +143,66 @@ class of `JobCalculation`, would get `calculation.job.quantumespresso.pw.PwCalcu
     mapping_node_type_to_entry_point[type_string] = entry_point_string
 
     return mapping_node_type_to_entry_point
+
+
+def write_database_integrity_violation(results, headers, reason_message, action_message=None):
+    """Emit an integrity violation warning and write the violating records to a log file in the current directory.
+
+    :param results: a list of tuples representing the violating records
+    :param headers: a tuple of strings that will be used as a header for the log file. Should have the same length
+        as each tuple in the results list.
+    :param reason_message: a human-readable message detailing the reason for the integrity violation
+    :param action_message: an optional human-readable message detailing a performed action, if any
+    """
+    # pylint: disable=duplicate-string-formatting-argument
+    from datetime import datetime
+    from tempfile import NamedTemporaryFile
+
+    from tabulate import tabulate
+
+    from aiida.cmdline.utils import echo
+    from aiida.manage import configuration
+
+    global_profile = configuration.get_profile()
+    if global_profile and global_profile.is_test_profile:
+        return
+
+    if action_message is None:
+        action_message = 'nothing'
+
+    with NamedTemporaryFile(prefix='migration-', suffix='.log', dir='.', delete=False, mode='w+') as handle:
+        echo.echo('')
+        echo.echo_warning(
+            '\n{}\nFound one or multiple records that violate the integrity of the database\nViolation reason: {}\n'
+            'Performed action: {}\nViolators written to: {}\n{}\n'.format(
+                WARNING_BORDER, reason_message, action_message, handle.name, WARNING_BORDER
+            )
+        )
+
+        handle.write(f'# {datetime.utcnow().isoformat()}\n')
+        handle.write(f'# Violation reason: {reason_message}\n')
+        handle.write(f'# Performed action: {action_message}\n')
+        handle.write('\n')
+        handle.write(tabulate(results, headers))
+
+
+# Currently valid hash key
+_HASH_EXTRA_KEY = '_aiida_hash'
+
+
+def drop_hashes(conn):
+    """Drop the hashes of all nodes.
+
+    Print a warning only if the database actually contains nodes.
+    """
+    # Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed
+    # pylint: disable=no-name-in-module,import-error
+    from sqlalchemy.sql import text
+
+    from aiida.cmdline.utils import echo
+    n_nodes = conn.execute(text("""SELECT count(*) FROM db_dbnode;""")).fetchall()[0][0]
+    if n_nodes > 0:
+        echo.echo_warning('Invalidating the hashes of all nodes. Please run "verdi rehash".', bold=True)
+
+    statement = text(f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];")
+    conn.execute(statement)
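`drop_hashes` above removes the `_aiida_hash` key with PostgreSQL's `#-` path-deletion operator; the triple braces are how the f-string emits a literal `{_aiida_hash}` text-array. A tiny sketch that only renders the statement, so it runs without a database:

    _HASH_EXTRA_KEY = '_aiida_hash'

    # In an f-string, '{{' and '}}' emit literal braces while '{_HASH_EXTRA_KEY}'
    # interpolates, so '{{{_HASH_EXTRA_KEY}}}' renders as '{_aiida_hash}'.
    statement = f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];"
    print(statement)
    # UPDATE db_dbnode SET extras = extras #- '{_aiida_hash}'::text[];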
diff --git a/aiida/backends/sqlalchemy/migrations/utils/legacy_workflows.py b/aiida/backends/sqlalchemy/migrations/utils/legacy_workflows.py
new file mode 100644
index 0000000000..92bcb4f95e
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/legacy_workflows.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name
+"""Utilities for removing legacy workflows."""
+import sys
+
+import click
+from sqlalchemy.sql import func, select, table
+
+from aiida.cmdline.utils import echo
+from aiida.common import json
+
+
+def json_serializer(obj):
+    """JSON serializer for objects not serializable by default json code"""
+    from datetime import date, datetime
+    from uuid import UUID
+
+    if isinstance(obj, UUID):
+        return str(obj)
+
+    if isinstance(obj, (datetime, date)):
+        return obj.isoformat()
+
+    raise TypeError(f'Type {type(obj)} not serializable')
+
+
+def export_workflow_data(connection, profile):
+    """Export existing legacy workflow data to a JSON file."""
+    from tempfile import NamedTemporaryFile
+
+    DbWorkflow = table('db_dbworkflow')
+    DbWorkflowData = table('db_dbworkflowdata')
+    DbWorkflowStep = table('db_dbworkflowstep')
+
+    count_workflow = connection.execute(select(func.count()).select_from(DbWorkflow)).scalar()
+    count_workflow_data = connection.execute(select(func.count()).select_from(DbWorkflowData)).scalar()
+    count_workflow_step = connection.execute(select(func.count()).select_from(DbWorkflowStep)).scalar()
+
+    # Nothing to do if all tables are empty
+    if count_workflow == 0 and count_workflow_data == 0 and count_workflow_step == 0:
+        return
+
+    if not profile.is_test_profile:
+        echo.echo('\n')
+        echo.echo_warning('The legacy workflow tables contain data but will have to be dropped to continue.')
+        echo.echo_warning('If you continue, the content will be dumped to a JSON file, before dropping the tables.')
+        echo.echo_warning('This serves merely as a reference and cannot be used to restore the database.')
+        echo.echo_warning('If you want a proper backup, make sure to dump the full database and back up your repository.')
+        if not click.confirm('Are you sure you want to continue?', default=True):
+            sys.exit(1)
+
+    delete_on_close = profile.is_test_profile
+
+    # pylint: disable=protected-access
+    data = {
+        'workflow': [dict(row._mapping) for row in connection.execute(select('*').select_from(DbWorkflow))],
+        'workflow_data': [dict(row._mapping) for row in connection.execute(select('*').select_from(DbWorkflowData))],
+        'workflow_step': [dict(row._mapping) for row in connection.execute(select('*').select_from(DbWorkflowStep))],
+    }
+
+    with NamedTemporaryFile(
+        prefix='legacy-workflows', suffix='.json', dir='.', delete=delete_on_close, mode='wb'
+    ) as handle:
+        filename = handle.name
+        json.dump(data, handle, default=json_serializer)
+
+    # If delete_on_close is False, we are running for the user and add an additional message of the file location
+    if not delete_on_close:
+        echo.echo_report(f'Exported workflow data to {filename}')
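The `default=` hook that `export_workflow_data` above hands to the JSON dumper is only consulted for objects the encoder cannot serialize natively, which is exactly where the UUID and datetime values of the legacy workflow rows end up. A self-contained illustration using the standard-library `json` module, which behaves analogously to the `aiida.common.json` wrapper used above:

    import json
    from datetime import date, datetime
    from uuid import UUID, uuid4

    def json_serializer(obj):
        """Serialize UUIDs and dates, which the default JSON encoder rejects."""
        if isinstance(obj, UUID):
            return str(obj)
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        raise TypeError(f'Type {type(obj)} not serializable')

    row = {'uuid': uuid4(), 'ctime': datetime(2019, 4, 3, 17, 11)}
    print(json.dumps(row, default=json_serializer))
    # e.g. {"uuid": "5a816c10-...", "ctime": "2019-04-03T17:11:00"}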
diff --git a/aiida/backends/sqlalchemy/migrations/utils/migrate_repository.py b/aiida/backends/sqlalchemy/migrations/utils/migrate_repository.py
new file mode 100644
index 0000000000..35511fa4ef
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/migrate_repository.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=too-many-locals,too-many-branches,too-many-statements
+"""Migrate the file repository to the new disk object store based implementation."""
+import json
+import pathlib
+from tempfile import NamedTemporaryFile
+
+from disk_objectstore import Container
+from sqlalchemy import Integer, cast
+from sqlalchemy.dialects.postgresql import JSONB, UUID
+from sqlalchemy.sql import column, func, select, table, text
+
+from aiida.backends.sqlalchemy.migrations.utils import utils
+from aiida.cmdline.utils import echo
+from aiida.common import exceptions
+from aiida.common.progress_reporter import get_progress_reporter, set_progress_bar_tqdm, set_progress_reporter
+from aiida.orm.implementation.sqlalchemy.backend import CONTAINER_DEFAULTS
+
+
+def migrate_repository(connection, profile):
+    """Migrations for the upgrade."""
+    DbNode = table(  # pylint: disable=invalid-name
+        'db_dbnode',
+        column('id', Integer),
+        column('uuid', UUID),
+        column('repository_metadata', JSONB),
+    )
+
+    node_count = connection.execute(select(func.count()).select_from(DbNode)).scalar()
+    missing_repo_folder = []
+    shard_count = 256
+
+    basepath = pathlib.Path(profile.repository_path) / 'repository' / 'node'
+    filepath = pathlib.Path(profile.repository_path) / 'container'
+    container = Container(filepath)
+
+    if not profile.is_test_profile and (node_count > 0 and not basepath.is_dir()):
+        raise exceptions.StorageMigrationError(
+            f'the file repository `{basepath}` does not exist but the database is not empty, it contains {node_count} '
+            'nodes. Aborting the migration.'
+        )
+
+    if not profile.is_test_profile and container.is_initialised:
+        raise exceptions.StorageMigrationError(
+            f'the container {filepath} already exists. If you ran this migration before and it failed, simply '
+            'delete this directory and restart the migration.'
+        )
+
+    container.init_container(clear=True, **CONTAINER_DEFAULTS)
+
+    # Only show the progress bar if there is at least one node in the database. Note that we cannot simply make the
+    # entire next block under the context manager optional, since it also checks whether the repository contains files
+    # that are not in the database, and those checks are important even if the database is empty.
+    if node_count > 0:
+        set_progress_bar_tqdm()
+    else:
+        set_progress_reporter(None)
+
+    with get_progress_reporter()(total=shard_count, desc='Migrating file repository') as progress:
+        for i in range(shard_count):
+
+            shard = '%.2x' % i  # noqa flynt
+            progress.set_description_str(f'Migrating file repository: shard {shard}')
+
+            mapping_node_repository_metadata, missing_sub_repo_folder = utils.migrate_legacy_repository(profile, shard)
+
+            if missing_sub_repo_folder:
+                missing_repo_folder.extend(missing_sub_repo_folder)
+                del missing_sub_repo_folder
+
+            if mapping_node_repository_metadata is None:
+                continue
+
+            for node_uuid, repository_metadata in mapping_node_repository_metadata.items():
+
+                # If `repository_metadata` is `{}` or `None`, we skip it, as we can leave the column default `null`.
+                if not repository_metadata:
+                    continue
+
+                value = cast(repository_metadata, JSONB)
+                # to-do: the django migration had logic to log warnings for missing UUIDs; should we re-instate it?
+                connection.execute(DbNode.update().where(DbNode.c.uuid == node_uuid).values(repository_metadata=value))
+
+            del mapping_node_repository_metadata
+            progress.update()
+
+    # Store the UUID of the repository container in the `DbSetting` table. Note that for new databases, the profile
+    # setup will already have stored the UUID and so it should be skipped, or an exception for a duplicate key will be
+    # raised. This migration step is only necessary for existing databases that are migrated.
+    container_id = container.container_id
+    statement = text(
+        f"""
+        INSERT INTO db_dbsetting (key, val, description, time)
+        VALUES ('repository|uuid', to_json('{container_id}'::text), 'Repository UUID', NOW())
+        ON CONFLICT (key) DO NOTHING;
+        """
+    )
+    connection.execute(statement)
+
+    if not profile.is_test_profile:
+
+        if missing_repo_folder:
+            prefix = 'migration-repository-missing-subfolder-'
+            with NamedTemporaryFile(prefix=prefix, suffix='.json', dir='.', mode='w+', delete=False) as handle:
+                json.dump(missing_repo_folder, handle)
+                echo.echo_warning(
+                    'Detected repository folders that were missing the required subfolder `path` or `raw_input`. '
+                    f'The paths of those node repository folders have been written to a log file: {handle.name}'
+                )
+
+        # If there were no nodes, most likely a new profile, there is no need to print the warning
+        if node_count:
+            echo.echo_warning(
+                'Migrated file repository to the new disk object store. The old repository has not been deleted for'
+                f' safety and can be found at {pathlib.Path(profile.repository_path, "repository")}.'
+            )
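The container-UUID insert above relies on `ON CONFLICT (key) DO NOTHING` to stay idempotent, so re-running the migration after a partial failure cannot fail on a duplicate key. The clause can be tried out on SQLite (version 3.24 or newer ships the same syntax), although the real statement of course runs on PostgreSQL:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE db_dbsetting (key TEXT PRIMARY KEY, val TEXT)')

    for _ in range(2):  # the second insert is silently skipped instead of raising
        conn.execute(
            "INSERT INTO db_dbsetting (key, val) VALUES ('repository|uuid', 'abc123') "
            'ON CONFLICT (key) DO NOTHING'
        )

    print(conn.execute('SELECT count(*) FROM db_dbsetting').fetchone()[0])  # 1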
diff --git a/aiida/backends/sqlalchemy/migrations/utils/parity.py b/aiida/backends/sqlalchemy/migrations/utils/parity.py
new file mode 100644
index 0000000000..32dfab061f
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/parity.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Utilities for synchronizing the django and sqlalchemy schemas."""
+import alembic
+
+from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations
+
+
+def synchronize_schemas(alembic_op: alembic.op) -> None:
+    """Synchronize the django and sqlalchemy schemas; used by the final migration step of both branches.
+
+    1. Remove and recreate all (non-unique) indexes, with standard names and postgresql ops.
+    2. Remove and recreate all unique constraints, with standard names.
+    3. Remove and recreate all foreign key constraints, with standard names and other rules.
+
+    Schema naming conventions are defined in ``aiida/backends/sqlalchemy/models/base.py::naming_convention``.
+
+    Note that we assume here that (a) all primary keys are already correct, and (b) there are no check constraints.
+ """ + reflect = ReflectMigrations(alembic_op) + + # drop all current non-unique indexes, then add the new ones + for tbl_name in ( + 'db_dbauthinfo', 'db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbgroup_dbnodes', 'db_dblink', 'db_dblog', + 'db_dbnode', 'db_dbsetting', 'db_dbuser' + ): + reflect.drop_all_indexes(tbl_name) + for name, tbl_name, column, psql_op in ( + ('ix_db_dbauthinfo_aiidauser_id', 'db_dbauthinfo', 'aiidauser_id', None), + ('ix_db_dbauthinfo_dbcomputer_id', 'db_dbauthinfo', 'dbcomputer_id', None), + ('ix_db_dbcomment_dbnode_id', 'db_dbcomment', 'dbnode_id', None), + ('ix_db_dbcomment_user_id', 'db_dbcomment', 'user_id', None), + ('ix_pat_db_dbcomputer_label', 'db_dbcomputer', 'label', 'varchar_pattern_ops'), + ('ix_db_dbgroup_label', 'db_dbgroup', 'label', None), + ('ix_pat_db_dbgroup_label', 'db_dbgroup', 'label', 'varchar_pattern_ops'), + ('ix_db_dbgroup_type_string', 'db_dbgroup', 'type_string', None), + ('ix_pat_db_dbgroup_type_string', 'db_dbgroup', 'type_string', 'varchar_pattern_ops'), + ('ix_db_dbgroup_user_id', 'db_dbgroup', 'user_id', None), + ('ix_db_dbgroup_dbnodes_dbgroup_id', 'db_dbgroup_dbnodes', 'dbgroup_id', None), + ('ix_db_dbgroup_dbnodes_dbnode_id', 'db_dbgroup_dbnodes', 'dbnode_id', None), + ('ix_db_dblink_input_id', 'db_dblink', 'input_id', None), + ('ix_db_dblink_label', 'db_dblink', 'label', None), + ('ix_pat_db_dblink_label', 'db_dblink', 'label', 'varchar_pattern_ops'), + ('ix_db_dblink_output_id', 'db_dblink', 'output_id', None), + ('ix_db_dblink_type', 'db_dblink', 'type', None), + ('ix_pat_db_dblink_type', 'db_dblink', 'type', 'varchar_pattern_ops'), + ('ix_db_dblog_dbnode_id', 'db_dblog', 'dbnode_id', None), + ('ix_db_dblog_levelname', 'db_dblog', 'levelname', None), + ('ix_pat_db_dblog_levelname', 'db_dblog', 'levelname', 'varchar_pattern_ops'), + ('ix_db_dblog_loggername', 'db_dblog', 'loggername', None), + ('ix_pat_db_dblog_loggername', 'db_dblog', 'loggername', 'varchar_pattern_ops'), + ('ix_db_dbnode_ctime', 'db_dbnode', 'ctime', None), + ('ix_db_dbnode_dbcomputer_id', 'db_dbnode', 'dbcomputer_id', None), + ('ix_db_dbnode_label', 'db_dbnode', 'label', None), + ('ix_pat_db_dbnode_label', 'db_dbnode', 'label', 'varchar_pattern_ops'), + ('ix_db_dbnode_mtime', 'db_dbnode', 'mtime', None), + ('ix_db_dbnode_process_type', 'db_dbnode', 'process_type', None), + ('ix_pat_db_dbnode_process_type', 'db_dbnode', 'process_type', 'varchar_pattern_ops'), + ('ix_db_dbnode_node_type', 'db_dbnode', 'node_type', None), + ('ix_pat_db_dbnode_node_type', 'db_dbnode', 'node_type', 'varchar_pattern_ops'), + ('ix_db_dbnode_user_id', 'db_dbnode', 'user_id', None), + ('ix_pat_db_dbsetting_key', 'db_dbsetting', 'key', 'varchar_pattern_ops'), + ('ix_pat_db_dbuser_email', 'db_dbuser', 'email', 'varchar_pattern_ops'), + ): + kwargs = {'unique': False} + if psql_op is not None: + kwargs['postgresql_ops'] = {column: psql_op} + alembic_op.create_index(name, tbl_name, [column], **kwargs) + + # drop all current unique constraints, then add the new ones + for tbl_name in ( + 'db_dbauthinfo', 'db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbgroup_dbnodes', 'db_dblink', 'db_dblog', + 'db_dbnode', 'db_dbsetting', 'db_dbuser' + ): + reflect.drop_all_unique_constraints(tbl_name) + reflect.reset_cache() + for name, tbl_name, columns in ( + ('uq_db_dbauthinfo_aiidauser_id_dbcomputer_id', 'db_dbauthinfo', ('aiidauser_id', 'dbcomputer_id')), + ('uq_db_dbcomment_uuid', 'db_dbcomment', ('uuid',)), + ('uq_db_dbcomputer_label', 'db_dbcomputer', ('label',)), + 
('uq_db_dbcomputer_uuid', 'db_dbcomputer', ('uuid',)),
+        ('uq_db_dbgroup_label_type_string', 'db_dbgroup', ('label', 'type_string')),
+        ('uq_db_dbgroup_uuid', 'db_dbgroup', ('uuid',)),
+        ('uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id', 'db_dbgroup_dbnodes', ('dbgroup_id', 'dbnode_id')),
+        ('uq_db_dblog_uuid', 'db_dblog', ('uuid',)),
+        ('uq_db_dbnode_uuid', 'db_dbnode', ('uuid',)),
+        ('uq_db_dbsetting_key', 'db_dbsetting', ('key',)),
+        ('uq_db_dbuser_email', 'db_dbuser', ('email',)),
+    ):
+        reflect.drop_indexes(tbl_name, columns, unique=True)  # drop any remaining indexes
+        alembic_op.create_unique_constraint(name, tbl_name, columns)
+
+    # drop all current foreign key constraints, then add the new ones
+    for tbl_name in (
+        'db_dbauthinfo', 'db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbgroup_dbnodes', 'db_dblink', 'db_dblog',
+        'db_dbnode', 'db_dbsetting', 'db_dbuser'
+    ):
+        reflect.drop_all_foreign_keys(tbl_name)
+    alembic_op.create_foreign_key(
+        'fk_db_dbauthinfo_aiidauser_id_db_dbuser',
+        'db_dbauthinfo',
+        'db_dbuser',
+        ['aiidauser_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbauthinfo_dbcomputer_id_db_dbcomputer',
+        'db_dbauthinfo',
+        'db_dbcomputer',
+        ['dbcomputer_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbcomment_dbnode_id_db_dbnode',
+        'db_dbcomment',
+        'db_dbnode',
+        ['dbnode_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbcomment_user_id_db_dbuser',
+        'db_dbcomment',
+        'db_dbuser',
+        ['user_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbgroup_user_id_db_dbuser',
+        'db_dbgroup',
+        'db_dbuser',
+        ['user_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbgroup_dbnodes_dbgroup_id_db_dbgroup',
+        'db_dbgroup_dbnodes',
+        'db_dbgroup',
+        ['dbgroup_id'],
+        ['id'],
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbgroup_dbnodes_dbnode_id_db_dbnode',
+        'db_dbgroup_dbnodes',
+        'db_dbnode',
+        ['dbnode_id'],
+        ['id'],
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dblink_input_id_db_dbnode',
+        'db_dblink',
+        'db_dbnode',
+        ['input_id'],
+        ['id'],
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dblink_output_id_db_dbnode',
+        'db_dblink',
+        'db_dbnode',
+        ['output_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dblog_dbnode_id_db_dbnode',
+        'db_dblog',
+        'db_dbnode',
+        ['dbnode_id'],
+        ['id'],
+        ondelete='CASCADE',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbnode_dbcomputer_id_db_dbcomputer',
+        'db_dbnode',
+        'db_dbcomputer',
+        ['dbcomputer_id'],
+        ['id'],
+        ondelete='RESTRICT',
+        deferrable=True,
+        initially='DEFERRED',
+    )
+    alembic_op.create_foreign_key(
+        'fk_db_dbnode_user_id_db_dbuser',
+        'db_dbnode',
+        'db_dbuser',
+        ['user_id'],
+        ['id'],
+        ondelete='RESTRICT',
+        deferrable=True,
+        initially='DEFERRED',
+    )
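Note that all foreign keys above are recreated `DEFERRABLE INITIALLY DEFERRED`, meaning they are only checked when the transaction commits, which lets migrations insert interdependent rows in any order. A small demonstration of those semantics, using SQLite only because it is easy to run in isolation (it supports deferred foreign keys as well; the real constraints live in PostgreSQL):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('PRAGMA foreign_keys = ON')
    conn.execute('CREATE TABLE parent (id INTEGER PRIMARY KEY)')
    conn.execute(
        'CREATE TABLE child (id INTEGER PRIMARY KEY, parent_id INTEGER '
        'REFERENCES parent (id) DEFERRABLE INITIALLY DEFERRED)'
    )

    with conn:  # a single transaction: the out-of-order insert is tolerated ...
        conn.execute('INSERT INTO child VALUES (1, 10)')  # parent 10 does not exist yet
        conn.execute('INSERT INTO parent VALUES (10)')    # ... because it exists by commit time
    print('committed without a foreign key violation')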
diff --git a/aiida/backends/general/migrations/provenance_redesign.py b/aiida/backends/sqlalchemy/migrations/utils/provenance_redesign.py
similarity index 55%
rename from aiida/backends/general/migrations/provenance_redesign.py
rename to aiida/backends/sqlalchemy/migrations/utils/provenance_redesign.py
index c40e85e1ad..899e5a43ab 100644
--- a/aiida/backends/general/migrations/provenance_redesign.py
+++ b/aiida/backends/sqlalchemy/migrations/utils/provenance_redesign.py
@@ -7,7 +7,14 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
-"""SQL statements to detect invalid/ununderstood links for the provenance redesign migration."""
+"""SQL statements to detect invalid/not-understood links for the provenance redesign migration."""
+from sqlalchemy import Integer, String
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.sql import column, select, table, text
+
+from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR
+
+from .integrity import infer_calculation_entry_point, write_database_integrity_violation
 
 SELECT_CALCULATIONS_WITH_OUTGOING_CALL = """
     SELECT node_in.uuid, node_out.uuid, link.type, link.label
@@ -88,3 +95,58 @@
     (SELECT_CALCULATIONS_WITH_OUTGOING_RETURN, 'detected calculation nodes with outgoing `return` links.'),
     (SELECT_WORKFLOWS_WITH_ISOLATED_CREATE_LINK, 'detected workflow nodes with isolated `create` links.'),
 )
+
+
+def migrate_infer_calculation_entry_point(alembic_op):
+    """Set the process type for calculation nodes by inferring it from their type string."""
+    connection = alembic_op.get_bind()
+    DbNode = table(  # pylint: disable=invalid-name
+        'db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String),
+        column('process_type', String)
+    )
+
+    query_set = connection.execute(select(DbNode.c.type).where(DbNode.c.type.like('calculation.%'))).fetchall()
+    type_strings = set(entry[0] for entry in query_set)
+    mapping_node_type_to_entry_point = infer_calculation_entry_point(type_strings=type_strings)
+
+    fallback_cases = []
+
+    for type_string, entry_point_string in mapping_node_type_to_entry_point.items():
+
+        # If the entry point string does not contain the entry point string separator, the mapping function was not
+        # able to map the type string onto a known entry point string. As a fallback it uses the modified type string
+        # itself. All affected entries should be logged to a file that the user can consult.
+        if ENTRY_POINT_STRING_SEPARATOR not in entry_point_string:
+            query_set = connection.execute(
+                select(DbNode.c.uuid).where(DbNode.c.type == alembic_op.inline_literal(type_string))
+            ).fetchall()
+
+            uuids = [str(entry.uuid) for entry in query_set]
+            for uuid in uuids:
+                fallback_cases.append([uuid, type_string, entry_point_string])
+
+        connection.execute(
+            DbNode.update().where(DbNode.c.type == alembic_op.inline_literal(type_string)
+                                  ).values(process_type=alembic_op.inline_literal(entry_point_string))
+        )
+
+    if fallback_cases:
+        headers = ['UUID', 'type (old)', 'process_type (fallback)']
+        warning_message = 'found calculation nodes with a type string that could not be mapped onto a known entry point'
+        action_message = 'inferred `process_type` for all calculation nodes, using a fallback for unknown entry points'
+        write_database_integrity_violation(fallback_cases, headers, warning_message, action_message)
+
+
+def detect_unexpected_links(alembic_op):
+    """Scan the database for any links that are unexpected.
+
+    The checks will verify that there are no outgoing `call` or `return` links from calculation nodes and that if a
+    workflow node has a `create` link, it has at least an accompanying `return` link to the same data node, or it has
+    a `call` link to a calculation node that takes the created data node as input.
+    """
+    connection = alembic_op.get_bind()
+    for sql, warning_message in INVALID_LINK_SELECT_STATEMENTS:
+        results = list(connection.execute(text(sql)))
+        if results:
+            headers = ['UUID source', 'UUID target', 'link type', 'link label']
+            write_database_integrity_violation(results, headers, warning_message)
diff --git a/aiida/backends/sqlalchemy/migrations/utils/reflect.py b/aiida/backends/sqlalchemy/migrations/utils/reflect.py
new file mode 100644
index 0000000000..a1609dafaf
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/utils/reflect.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Utility for performing schema migrations, via reflection of the current database."""
+from __future__ import annotations
+
+import alembic
+from sqlalchemy import inspect
+
+
+class ReflectMigrations:
+    """Perform schema migrations, via reflection of the current database.
+
+    In django, it is not possible to explicitly specify constraints/indexes and their names;
+    instead they are implicitly created by internal "auto-generation" code
+    (as opposed to sqlalchemy, where one can explicitly specify the names).
+    For a specific django version, this auto-generation code is deterministic;
+    however, it has changed over time.
+    So it is not possible to know declaratively exactly what constraints/indexes are present on a user's database,
+    without knowing the exact django version that created it (and ran its migrations).
+    Therefore, we need to reflect the database's schema, to determine what is present on the database,
+    to know what to drop.
+    """
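Because those auto-generated django names are unknowable up front, `ReflectMigrations` asks the live database for them through SQLAlchemy's `Inspector`, as the methods below show. A self-contained look at the same API against an in-memory SQLite database (assuming SQLAlchemy 1.4+; the index name stands in for whatever django happened to generate):

    import sqlalchemy as sa

    engine = sa.create_engine('sqlite:///:memory:')
    with engine.begin() as connection:
        connection.execute(sa.text('CREATE TABLE db_dbgroup (id INTEGER PRIMARY KEY, uuid VARCHAR)'))
        connection.execute(sa.text('CREATE UNIQUE INDEX some_autogenerated_name ON db_dbgroup (uuid)'))

    inspector = sa.inspect(engine)
    for index in inspector.get_indexes('db_dbgroup'):
        # Whatever the name turns out to be, we now know it and can drop or replace it.
        print(index['name'], index['column_names'], index['unique'])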
+ """ + + def __init__(self, op: alembic.op) -> None: + self.op = op # pylint: disable=invalid-name + # note, we only want to instatiate the inspector once, since it caches reflection calls to the database + self.inspector = inspect(op.get_bind()) + + def reset_cache(self) -> None: + """Reset the inspector cache.""" + self.inspector = inspect(self.op.get_bind()) + + def drop_all_unique_constraints(self, table_name: str) -> None: + """Drop all unique constraints set for this table.""" + for constraint in self.inspector.get_unique_constraints(table_name): + self.op.drop_constraint(constraint['name'], table_name, type_='unique') + + def drop_unique_constraints(self, table_name: str, column_names: list[str]) -> None: + """Drop all unique constraints set for this column name group.""" + column_set = set(column_names) + for constraint in self.inspector.get_unique_constraints(table_name): + if set(constraint['column_names']) == column_set: + self.op.drop_constraint(constraint['name'], table_name, type_='unique') + + def drop_all_indexes(self, table_name: str, unique: bool = False) -> None: + """Drop all non-unique indexes set for this table.""" + for index in self.inspector.get_indexes(table_name): + if index['unique'] is unique: + self.op.drop_index(index['name'], table_name) + + def drop_indexes(self, table_name: str, column: str | list[str], unique: bool = False) -> None: + """Drop all indexes set for this column name group.""" + if isinstance(column, str): + column = [column] + column_set = set(column) + for index in self.inspector.get_indexes(table_name): + if (index['unique'] is unique) and (set(index['column_names']) == column_set): + self.op.drop_index(index['name'], table_name) + + def drop_all_foreign_keys(self, table_name: str) -> None: + """Drop all foreign keys set for this table.""" + for constraint in self.inspector.get_foreign_keys(table_name): + self.op.drop_constraint(constraint['name'], table_name, type_='foreignkey') + + def drop_foreign_keys(self, table_name: str, columns: list[str], ref_tbl: str, ref_columns: list[str]) -> None: + """Drop all foreign keys set for this column name group and referring column set.""" + column_set = set(columns) + ref_column_set = set(ref_columns) + for constraint in self.inspector.get_foreign_keys(table_name): + if constraint['referred_table'] != ref_tbl: + continue + if set(constraint['referred_columns']) != ref_column_set: + continue + if set(constraint['constrained_columns']) == column_set: + self.op.drop_constraint(constraint['name'], table_name, type_='foreignkey') + + def replace_index(self, label: str, table_name: str, column: str, unique: bool = False) -> None: + """Create index, dropping any existing index with the same table+columns.""" + self.drop_indexes(table_name, column, unique) + self.op.create_index(label, table_name, column, unique=unique) + + def replace_unique_constraint(self, label: str, table_name: str, columns: list[str]) -> None: + """Create unique constraint, dropping any existing unique constraint with the same table+columns.""" + self.drop_unique_constraints(table_name, columns) + self.op.create_unique_constraint(label, table_name, columns) + + def replace_foreign_key( + self, label: str, table_name: str, columns: list[str], ref_tbl: str, ref_columns: list[str], **kwargs + ) -> None: + """Create foreign key, dropping any existing foreign key with the same constraints.""" + self.drop_foreign_keys(table_name, columns, ref_tbl, ref_columns) + self.op.create_foreign_key(label, table_name, ref_tbl, columns, ref_columns, 
**kwargs) diff --git a/aiida/backends/general/migrations/utils.py b/aiida/backends/sqlalchemy/migrations/utils/utils.py similarity index 89% rename from aiida/backends/general/migrations/utils.py rename to aiida/backends/sqlalchemy/migrations/utils/utils.py index 94c03075d7..b652c82dad 100644 --- a/aiida/backends/general/migrations/utils.py +++ b/aiida/backends/sqlalchemy/migrations/utils/utils.py @@ -129,7 +129,7 @@ def get_info(self, statistics: bool = False, **kwargs) -> dict: raise NotImplementedError -def migrate_legacy_repository(shard=None): +def migrate_legacy_repository(profile, shard=None): """Migrate the legacy file repository to the new disk object store and return mapping of repository metadata. .. warning:: this method assumes that the new disk object store container has been initialized. @@ -148,9 +148,6 @@ def migrate_legacy_repository(shard=None): :return: mapping of node UUIDs onto the new repository metadata. """ # pylint: disable=too-many-locals - from aiida.manage.configuration import get_profile - - profile = get_profile() backend = NoopRepositoryBackend() repository = MigrationRepository(backend=backend) @@ -161,7 +158,7 @@ def migrate_legacy_repository(shard=None): if not basepath.exists(): return None, None - node_repository_dirpaths, missing_sub_repo_folder = get_node_repository_dirpaths(basepath, shard) + node_repository_dirpaths, missing_sub_repo_folder = get_node_repository_dirpaths(profile, basepath, shard) filepaths = [] streams = [] @@ -205,20 +202,17 @@ def migrate_legacy_repository(shard=None): return mapping_metadata, missing_sub_repo_folder -def get_node_repository_dirpaths(basepath, shard=None): +def get_node_repository_dirpaths(profile, basepath, shard=None): """Return a mapping of node UUIDs onto the path to their current repository folder in the old repository. :param basepath: the absolute path of the base folder of the old file repository. :param shard: optional shard to define which first shard level to check. If `None`, all shard levels are checked. :return: dictionary of node UUID onto absolute filepath and list of node repo missing one of the two known sub folders, ``path`` or ``raw_input``, which is unexpected. - :raises `~aiida.common.exceptions.DatabaseMigrationError`: if the repository contains node folders that contain both + :raises `~aiida.common.exceptions.StorageMigrationError`: if the repository contains node folders that contain both the `path` and `raw_input` subdirectories, which should never happen. """ # pylint: disable=too-many-branches - from aiida.manage.configuration import get_profile - - profile = get_profile() mapping = {} missing_sub_repo_folder = [] contains_both = [] @@ -276,7 +270,7 @@ def get_node_repository_dirpaths(basepath, shard=None): mapping[uuid] = path if contains_both and not profile.is_test_profile: - raise exceptions.DatabaseMigrationError( + raise exceptions.StorageMigrationError( f'The file repository `{basepath}` contained node repository folders that contained both the `path` as well' ' as the `raw_input` subfolders. This should not have happened, as the latter is used for calculation job ' 'nodes, and the former for all other nodes. 
The migration will be aborted and the paths of the offending ' @@ -303,24 +297,24 @@ def serialize_repository(repository: Repository) -> dict: return {'k': file_object.key} -def ensure_repository_folder_created(uuid): +def ensure_repository_folder_created(repository_path, uuid): """Make sure that the repository sub folder for the node with the given UUID exists or create it. :param uuid: UUID of the node """ - dirpath = get_node_repository_sub_folder(uuid) + dirpath = get_node_repository_sub_folder(repository_path, uuid) os.makedirs(dirpath, exist_ok=True) -def put_object_from_string(uuid, name, content): +def put_object_from_string(repository_path, uuid, name, content): """Write a file with the given content in the repository sub folder of the given node. :param uuid: UUID of the node :param name: name to use for the file :param content: the content to write to the file """ - ensure_repository_folder_created(uuid) - basepath = get_node_repository_sub_folder(uuid) + ensure_repository_folder_created(repository_path, uuid) + basepath = get_node_repository_sub_folder(repository_path, uuid) dirname = os.path.dirname(name) if dirname: @@ -332,65 +326,51 @@ def put_object_from_string(uuid, name, content): handle.write(content) -def get_object_from_repository(uuid, name): - """Return the content of a file with the given name in the repository sub folder of the given node. - - :param uuid: UUID of the node - :param name: name to use for the file - """ - filepath = os.path.join(get_node_repository_sub_folder(uuid), name) - - with open(filepath, encoding='utf-8') as handle: - return handle.read() - - -def get_node_repository_sub_folder(uuid, subfolder='path'): +def get_node_repository_sub_folder(repository_path, uuid, subfolder='path'): """Return the absolute path to the sub folder `path` within the repository of the node with the given UUID. :param uuid: UUID of the node :return: absolute path to node repository folder, i.e `/some/path/repository/node/12/ab/c123134-a123/path` """ - from aiida.manage.configuration import get_profile - uuid = str(uuid) - repo_dirpath = os.path.join(get_profile().repository_path, 'repository') + repo_dirpath = os.path.join(repository_path, 'repository') node_dirpath = os.path.join(repo_dirpath, 'node', uuid[:2], uuid[2:4], uuid[4:], subfolder) return node_dirpath -def get_numpy_array_absolute_path(uuid, name): +def get_numpy_array_absolute_path(repository_path, uuid, name): """Return the absolute path of a numpy array with the given name in the repository of the node with the given uuid. :param uuid: the UUID of the node :param name: the name of the numpy array :return: the absolute path of the numpy array file """ - return os.path.join(get_node_repository_sub_folder(uuid), f'{name}.npy') + return os.path.join(get_node_repository_sub_folder(repository_path, uuid), f'{name}.npy') -def store_numpy_array_in_repository(uuid, name, array): +def store_numpy_array_in_repository(repository_path, uuid, name, array): """Store a numpy array in the repository folder of a node. 
:param uuid: the node UUID :param name: the name under which to store the array :param array: the numpy array to store """ - ensure_repository_folder_created(uuid) - filepath = get_numpy_array_absolute_path(uuid, name) + ensure_repository_folder_created(repository_path, uuid) + filepath = get_numpy_array_absolute_path(repository_path, uuid, name) with open(filepath, 'wb') as handle: numpy.save(handle, array) -def delete_numpy_array_from_repository(uuid, name): +def delete_numpy_array_from_repository(repository_path, uuid, name): """Delete the numpy array with a given name from the repository corresponding to a node with a given uuid. :param uuid: the UUID of the node :param name: the name of the numpy array """ - filepath = get_numpy_array_absolute_path(uuid, name) + filepath = get_numpy_array_absolute_path(repository_path, uuid, name) try: os.remove(filepath) @@ -398,22 +378,20 @@ def delete_numpy_array_from_repository(uuid, name): pass -def load_numpy_array_from_repository(uuid, name): +def load_numpy_array_from_repository(repository_path, uuid, name): """Load and return a numpy array from the repository folder of a node. :param uuid: the node UUID :param name: the name under which to store the array :return: the numpy array """ - filepath = get_numpy_array_absolute_path(uuid, name) + filepath = get_numpy_array_absolute_path(repository_path, uuid, name) return numpy.load(filepath) -def get_repository_object(hashkey): +def get_repository_object(profile, hashkey): """Return the content of an object stored in the disk object store repository for the given hashkey.""" - from aiida.manage.configuration import get_profile - - dirpath_container = os.path.join(get_profile().repository_path, 'container') + dirpath_container = os.path.join(profile.repository_path, 'container') container = Container(dirpath_container) return container.get_object_content(hashkey) diff --git a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py index fa8539ad3c..01a80b5769 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py +++ b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py @@ -9,25 +9,21 @@ ########################################################################### # pylint: disable=invalid-name,no-member,import-error,no-name-in-module,protected-access """This migration cleans the log records from non-Node entity records. + It removes from the DbLog table the legacy workflow records and records that correspond to an unknown entity and places them to corresponding files. -This migration corresponds to the 0024_dblog_update Django migration. +This migration corresponds to the 0024_dblog_update Django migration (except without uuid addition). Revision ID: 041a79fc615f Revises: 7ca08c391c49 Create Date: 2018-12-28 15:53:14.596810 """ -import sys - from alembic import op -import click import sqlalchemy as sa from sqlalchemy.sql import text -from aiida.backends.general.migrations.utils import dumps_json -from aiida.cmdline.utils import echo -from aiida.manage import configuration +from aiida.backends.sqlalchemy.migrations.utils.dblog_update import export_and_clean_workflow_logs # revision identifiers, used by Alembic. 
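The repository helpers above derive a node's on-disk location purely from its UUID: two two-character shard levels, the UUID remainder, and a subfolder (`path` by default), with numpy arrays stored as `<name>.npy` files inside. A round trip under a temporary directory standing in for the profile's `repository_path`; the helper is re-declared locally so the snippet runs on its own, and the UUID is made up:

    import os
    import tempfile

    import numpy

    def get_node_repository_sub_folder(repository_path, uuid, subfolder='path'):
        """Mirror of the helper above: repository/node/<aa>/<bb>/<rest>/<subfolder>."""
        uuid = str(uuid)
        return os.path.join(repository_path, 'repository', 'node', uuid[:2], uuid[2:4], uuid[4:], subfolder)

    with tempfile.TemporaryDirectory() as repository_path:
        uuid = '12ab0000-0000-4000-8000-000000000000'
        dirpath = get_node_repository_sub_folder(repository_path, uuid)
        os.makedirs(dirpath, exist_ok=True)

        filepath = os.path.join(dirpath, 'symbols.npy')
        with open(filepath, 'wb') as handle:
            numpy.save(handle, numpy.array(['H', 'O']))

        print(filepath.replace(repository_path, ''))  # /repository/node/12/ab/0000-.../path/symbols.npy
        print(numpy.load(filepath))                   # ['H' 'O']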
revision = '041a79fc615f' @@ -35,212 +31,6 @@ branch_labels = None depends_on = None -# The values that will be exported for the log records that will be deleted -values_to_export = ['id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata'] - - -def get_legacy_workflow_log_number(connection): - """ Get the number of the log records that correspond to legacy workflows """ - return connection.execute( - text( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname LIKE 'aiida.workflows.user.%') - """ - ) - ).fetchall()[0][0] - - -def get_unknown_entity_log_number(connection): - """ Get the number of the log records that correspond to unknown entities """ - return connection.execute( - text( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname NOT LIKE 'node.%') AND - (db_dblog.objname NOT LIKE 'aiida.workflows.user.%') - """ - ) - ).fetchall()[0][0] - - -def get_logs_with_no_nodes_number(connection): - """ Get the number of the log records that correspond to nodes that were deleted """ - return connection.execute( - text( - """ - SELECT COUNT(*) FROM db_dblog - WHERE - (db_dblog.objname LIKE 'node.%') AND NOT EXISTS - (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) - """ - ) - ).fetchall()[0][0] - - -def get_serialized_legacy_workflow_logs(connection): - """ Get the serialized log records that correspond to legacy workflows """ - query = connection.execute( - text( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, - db_dblog.message, db_dblog.metadata FROM db_dblog - WHERE - (db_dblog.objname LIKE 'aiida.workflows.user.%') - """ - ) - ) - res = [] - for row in query: - res.append(row._asdict()) - return dumps_json(res) - - -def get_serialized_unknown_entity_logs(connection): - """ Get the serialized log records that correspond to unknown entities """ - query = connection.execute( - text( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, - db_dblog.message, db_dblog.metadata FROM db_dblog - WHERE - (db_dblog.objname NOT LIKE 'node.%') AND - (db_dblog.objname NOT LIKE 'aiida.workflows.user.%') - """ - ) - ) - res = [] - for row in query: - res.append(row._asdict()) - return dumps_json(res) - - -def get_serialized_logs_with_no_nodes(connection): - """ Get the serialized log records that correspond to nodes that were deleted """ - query = connection.execute( - text( - """ - SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, - db_dblog.message, db_dblog.metadata FROM db_dblog - WHERE - (db_dblog.objname LIKE 'node.%') AND NOT EXISTS - (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) - """ - ) - ) - res = [] - for row in query: - res.append(row._asdict()) - return dumps_json(res) - - -def export_and_clean_workflow_logs(connection): - """ - Export the logs records that correspond to legacy workflows and to unknown entities (place them to files - and remove them from the DbLog table). 
- """ - from tempfile import NamedTemporaryFile - - lwf_no_number = get_legacy_workflow_log_number(connection) - other_number = get_unknown_entity_log_number(connection) - log_no_node_number = get_logs_with_no_nodes_number(connection) - - # If there are no legacy workflow log records or log records of an unknown entity - if lwf_no_number == 0 and other_number == 0 and log_no_node_number == 0: - return - - if not configuration.PROFILE.is_test_profile: - echo.echo_warning( - 'We found {} log records that correspond to legacy workflows and {} log records to correspond ' - 'to an unknown entity.'.format(lwf_no_number, other_number) - ) - echo.echo_warning( - 'These records will be removed from the database and exported to JSON files to the current directory).' - ) - proceed = click.confirm('Would you like to proceed?', default=True) - if not proceed: - sys.exit(1) - - delete_on_close = configuration.PROFILE.is_test_profile - - # Exporting the legacy workflow log records - if lwf_no_number != 0: - - # Get the records and write them to file - with NamedTemporaryFile( - prefix='legagy_wf_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+' - ) as handle: - # Export the log records - filename = handle.name - handle.write(get_serialized_legacy_workflow_logs(connection)) - - # If delete_on_close is False, we are running for the user and add additional message of file location - if not delete_on_close: - echo.echo(f'Exported legacy workflow logs to {filename}') - - # Now delete the records - connection.execute( - text( - """ - DELETE FROM db_dblog - WHERE - (db_dblog.objname LIKE 'aiida.workflows.user.%') - """ - ) - ) - - # Exporting unknown log records - if other_number != 0: - # Get the records and write them to file - with NamedTemporaryFile( - prefix='unknown_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+' - ) as handle: - # Export the log records - filename = handle.name - handle.write(get_serialized_unknown_entity_logs(connection)) - - # If delete_on_close is False, we are running for the user and add additional message of file location - if not delete_on_close: - echo.echo(f'Exported unexpected entity logs to {filename}') - - # Now delete the records - connection.execute( - text( - """ - DELETE FROM db_dblog WHERE - (db_dblog.objname NOT LIKE 'node.%') AND - (db_dblog.objname NOT LIKE 'aiida.workflows.user.%') - """ - ) - ) - - # Exporting log records that don't correspond to nodes - if log_no_node_number != 0: - # Get the records and write them to file - with NamedTemporaryFile( - prefix='no_node_entity_logs-', suffix='.log', dir='.', delete=delete_on_close, mode='w+' - ) as handle: - # Export the log records - filename = handle.name - handle.write(get_serialized_logs_with_no_nodes(connection)) - - # If delete_on_close is False, we are running for the user and add additional message of file location - if not delete_on_close: - echo.echo('Exported entity logs that don\'t correspond to nodes to {}'.format(filename)) - - # Now delete the records - connection.execute( - text( - """ - DELETE FROM db_dblog WHERE - (db_dblog.objname LIKE 'node.%') AND NOT EXISTS - (SELECT 1 FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk LIMIT 1) - """ - ) - ) - def upgrade(): """ @@ -250,18 +40,26 @@ def upgrade(): connection = op.get_bind() # Clean data - export_and_clean_workflow_logs(connection) + export_and_clean_workflow_logs(connection, op.get_context().opts['aiida_profile']) + + # Remove objpk and objname from the metadata dictionary + 
connection.execute(text("""UPDATE db_dblog SET metadata = metadata - 'objpk' - 'objname' """))
 
-    # Create the dbnode_id column and add the necessary index
+    # Create a new column, which is a foreign key to the dbnode table
     op.add_column('db_dblog', sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True))
 
     # Transfer data to dbnode_id from objpk
     connection.execute(text("""UPDATE db_dblog SET dbnode_id=objpk"""))
-    op.create_foreign_key(
-        None, 'db_dblog', 'db_dbnode', ['dbnode_id'], ['id'], ondelete='CASCADE', initially='DEFERRED', deferrable=True
+    op.create_foreign_key(
+        'db_dblog_dbnode_id_fkey',
+        'db_dblog',
+        'db_dbnode', ['dbnode_id'], ['id'],
+        ondelete='CASCADE',
+        initially='DEFERRED',
+        deferrable=True
     )
 
-    # Update the dbnode_id column to not nullable
+    # Now that all the data have been migrated, make the column not nullable and not blank.
+    # A log record should always correspond to a node record
     op.alter_column('db_dblog', 'dbnode_id', nullable=False)
 
     # Remove the objpk column
@@ -270,43 +68,9 @@ def upgrade():
     # Remove the objname column
     op.drop_column('db_dblog', 'objname')
 
-    # Remove objpk and objname from metadata dictionary
-    connection.execute(text("""UPDATE db_dblog SET metadata = metadata - 'objpk' - 'objname' """))
-
 
 def downgrade():
     """
     Downgrade function to the previous schema.
     """
-    # Create an empty column objname (the data is permanently lost)
-    op.add_column('db_dblog', sa.Column('objname', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
-    op.create_index('ix_db_dblog_objname', 'db_dblog', ['objname'])
-
-    # Creating a column objpk
-
-    op.add_column('db_dblog', sa.Column('objpk', sa.INTEGER(), autoincrement=False, nullable=True))
-
-    # Copy the data back to objpk from dbnode_id
-    op.execute(text("""UPDATE db_dblog SET objpk=dbnode_id"""))
-
-    # Removing the column dbnode_id
-    op.drop_column('db_dblog', 'dbnode_id')
-
-    # Populate objname with correct values
-    op.execute(
-        text("""UPDATE db_dblog SET objname=db_dbnode.type
-        FROM db_dbnode WHERE db_dbnode.id = db_dblog.objpk""")
-    )
-
-    # Enrich metadata with objpk and objname if these keys don't exist
-    op.execute(
-        text(
-            """UPDATE db_dblog SET metadata = jsonb_set(metadata, '{"objpk"}', to_jsonb(objpk))
-            WHERE NOT (metadata ?| '{"objpk"}') """
-        )
-    )
-    op.execute(
-        text(
-            """UPDATE db_dblog SET metadata = jsonb_set(metadata, '{"objname"}', to_jsonb(objname))
-            WHERE NOT (metadata ?| '{"objname"}') """
-        )
-    )
+    raise NotImplementedError('Downgrade of 041a79fc615f.')
diff --git a/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py b/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
index 66d8f7e0a8..9f24befc85 100644
--- a/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
+++ b/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
@@ -8,7 +8,9 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 # pylint: disable=invalid-name,no-member
-"""Drop the `transport_params` from the `Computer` database model.
+"""Drop `db_dbcomputer.transport_params` + +This is similar to migration django_0036 Revision ID: 07fac78e6209 Revises: de2eaf6978b4 diff --git a/aiida/backends/sqlalchemy/migrations/versions/118349c10896_default_link_label.py b/aiida/backends/sqlalchemy/migrations/versions/118349c10896_default_link_label.py index 11bb63b7f6..b09a1b1120 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/118349c10896_default_link_label.py +++ b/aiida/backends/sqlalchemy/migrations/versions/118349c10896_default_link_label.py @@ -8,11 +8,14 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name -"""Update all link labels with the value `_return` which is the legacy default single link label. +"""Update all link labels with the value `_return` +This is the legacy default single link label. The old process functions used to use `_return` as the default link label, however, since labels that start or end with and underscore are illegal because they are used for namespacing. +This is identical to migration django_0043 + Revision ID: 118349c10896 Revises: 91b573400be5 Create Date: 2019-11-21 09:43:45.006053 @@ -44,3 +47,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 118349c10896.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py b/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py index fe612b95b2..b91636d1eb 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py +++ b/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py @@ -10,6 +10,8 @@ # pylint: disable=invalid-name,no-member """Move trajectory symbols from repository array to attribute +Note, this is similar to the django migration django_0025 + Revision ID: 12536798d4d3 Revises: 37f3d4882837 Create Date: 2019-01-21 10:15:02.451308 @@ -23,9 +25,9 @@ from alembic import op from sqlalchemy import Integer, String, cast from sqlalchemy.dialects.postgresql import JSONB, UUID -from sqlalchemy.sql import column, func, select, table, text +from sqlalchemy.sql import column, func, select, table -from aiida.backends.general.migrations.utils import load_numpy_array_from_repository +from aiida.backends.sqlalchemy.migrations.utils.utils import load_numpy_array_from_repository # revision identifiers, used by Alembic. 
revision = '12536798d4d3' @@ -39,34 +41,32 @@ def upgrade(): """Migrations for the upgrade.""" - # yapf:disable connection = op.get_bind() + profile = op.get_context().opts['aiida_profile'] + repo_path = profile.repository_path - DbNode = table('db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String), - column('attributes', JSONB)) + DbNode = table( + 'db_dbnode', + column('id', Integer), + column('uuid', UUID), + column('type', String), + column('attributes', JSONB), + ) nodes = connection.execute( - select(DbNode.c.id, DbNode.c.uuid).where( - DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.'))).fetchall() + select(DbNode.c.id, + DbNode.c.uuid).where(DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.')) + ).fetchall() for pk, uuid in nodes: - symbols = load_numpy_array_from_repository(uuid, 'symbols').tolist() - connection.execute(DbNode.update().where(DbNode.c.id == pk).values( - attributes=func.jsonb_set(DbNode.c.attributes, op.inline_literal('{"symbols"}'), cast(symbols, JSONB)))) + symbols = load_numpy_array_from_repository(repo_path, uuid, 'symbols').tolist() + connection.execute( + DbNode.update().where(DbNode.c.id == pk).values( + attributes=func.jsonb_set(DbNode.c.attributes, op.inline_literal('{"symbols"}'), cast(symbols, JSONB)) + ) + ) def downgrade(): """Migrations for the downgrade.""" - # yapf:disable - connection = op.get_bind() - - DbNode = table('db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String), - column('attributes', JSONB)) - - nodes = connection.execute( - select(DbNode.c.id, DbNode.c.uuid).where( - DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.'))).fetchall() - - for pk, _ in nodes: - connection.execute( - text(f"""UPDATE db_dbnode SET attributes = attributes #- '{{symbols}}' WHERE id = {pk}""")) + raise NotImplementedError('Downgrade of 12536798d4d3.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/140c971ae0a3_migrate_builtin_calculations.py b/aiida/backends/sqlalchemy/migrations/versions/140c971ae0a3_migrate_builtin_calculations.py index bcfa908ba2..b05ee5141e 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/140c971ae0a3_migrate_builtin_calculations.py +++ b/aiida/backends/sqlalchemy/migrations/versions/140c971ae0a3_migrate_builtin_calculations.py @@ -61,29 +61,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() # pylint: disable=no-member - - statement = text( - """ - UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.' - WHERE type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.'; - - UPDATE db_dbnode SET type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.' 
- WHERE type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.arithmetic.add' - WHERE process_type = 'aiida.calculations:arithmetic.add'; - - UPDATE db_dbnode SET process_type = 'aiida.calculations:simpleplugins.templatereplacer' - WHERE process_type = 'aiida.calculations:templatereplacer'; - - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"input_plugin"}', '"simpleplugins.arithmetic.add"') - WHERE attributes @> '{"input_plugin": "arithmetic.add"}' - AND type = 'data.code.Code.'; - - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"input_plugin"}', '"simpleplugins.templatereplacer"') - WHERE attributes @> '{"input_plugin": "templatereplacer"}' - AND type = 'data.code.Code.'; - """ - ) - conn.execute(statement) + raise NotImplementedError('Downgrade of 140c971ae0a3.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/162b99bca4a2_drop_dbcalcstate.py b/aiida/backends/sqlalchemy/migrations/versions/162b99bca4a2_drop_dbcalcstate.py index 1d0539a20a..75184ec65d 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/162b99bca4a2_drop_dbcalcstate.py +++ b/aiida/backends/sqlalchemy/migrations/versions/162b99bca4a2_drop_dbcalcstate.py @@ -16,8 +16,6 @@ """ from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = '162b99bca4a2' @@ -27,19 +25,10 @@ def upgrade(): + """Migrations for the upgrade.""" op.drop_table('db_dbcalcstate') def downgrade(): - op.create_table( - 'db_dbcalcstate', sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'], - name='db_dbcalcstate_dbnode_id_fkey', - ondelete='CASCADE', - initially='DEFERRED', - deferrable=True), sa.PrimaryKeyConstraint('id', name='db_dbcalcstate_pkey'), - sa.UniqueConstraint('dbnode_id', 'state', name='db_dbcalcstate_dbnode_id_state_key') - ) + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 162b99bca4a2.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/1830c8430131_drop_node_columns_nodeversion_public.py b/aiida/backends/sqlalchemy/migrations/versions/1830c8430131_drop_node_columns_nodeversion_public.py index 0e9587e5b3..fad8751081 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1830c8430131_drop_node_columns_nodeversion_public.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1830c8430131_drop_node_columns_nodeversion_public.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Drop the columns `nodeversion` and `public` from the `DbNode` model. +"""Drop `db_dbnode.nodeversion` and `db_dbnode.public` + +This is similar to migration django_0033 Revision ID: 1830c8430131 Revises: 1b8ed3425af9 @@ -18,7 +20,6 @@ # pylint: disable=invalid-name,no-member,import-error,no-name-in-module from alembic import op -import sqlalchemy as sa # revision identifiers, used by Alembic. 
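The downgrade SQL removed above selected `Code` nodes by JSONB containment (`attributes @> '{"input_plugin": ...}'`). For reference, the same predicate expressed through SQLAlchemy and compiled for PostgreSQL, with no database involved and illustrative values (assuming SQLAlchemy 1.4+):

    import sqlalchemy as sa
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.dialects.postgresql import JSONB
    from sqlalchemy.sql import column, table

    DbNode = table('db_dbnode', column('id', sa.Integer), column('attributes', JSONB))

    # .contains() on a JSONB column renders as the @> containment operator.
    statement = sa.select(DbNode.c.id).where(DbNode.c.attributes.contains({'input_plugin': 'arithmetic.add'}))
    print(statement.compile(dialect=postgresql.dialect()))
    # SELECT db_dbnode.id FROM db_dbnode WHERE db_dbnode.attributes @> %(attributes_1)s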
revision = '1830c8430131' @@ -28,10 +29,11 @@ def upgrade(): + """Migrations for the upgrade.""" op.drop_column('db_dbnode', 'nodeversion') op.drop_column('db_dbnode', 'public') def downgrade(): - op.add_column('db_dbnode', sa.Column('public', sa.BOOLEAN(), autoincrement=False, nullable=True)) - op.add_column('db_dbnode', sa.Column('nodeversion', sa.INTEGER(), autoincrement=False, nullable=True)) + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 1830c8430131.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py b/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py index b7a5a0cd20..23b9363e3a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py @@ -7,27 +7,19 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name +# pylint: disable=invalid-name,no-member,import-error,no-name-in-module """Remove legacy workflows +This is similar to migration django_0032 + Revision ID: 1b8ed3425af9 Revises: 3d6190594e19 Create Date: 2019-04-03 17:11:44.073582 """ -import sys - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-member,import-error,no-name-in-module from alembic import op -import click -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.sql import func, select, table -from aiida.cmdline.utils import echo -from aiida.common import json -from aiida.manage import configuration +from aiida.backends.sqlalchemy.migrations.utils.legacy_workflows import export_workflow_data # revision identifiers, used by Alembic. 
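Note how the upgrade in the hunk below, like the trajectory hunk earlier, reads the profile with `op.get_context().opts['aiida_profile']` instead of the global `configuration.PROFILE`. A minimal sketch of how an `env.py` could put the profile there; this wiring is an assumption for illustration, not necessarily aiida-core's actual `env.py`:

# Assumed wiring, for illustration only.
from alembic import context


def run_migrations_online(connection, aiida_profile):
    """Run migrations with the AiiDA profile stashed on the migration context.

    Extra keyword arguments to `configure` end up in `MigrationContext.opts`,
    which is what `op.get_context().opts['aiida_profile']` reads back.
    """
    context.configure(connection=connection, target_metadata=None, aiida_profile=aiida_profile)

    with context.begin_transaction():
        context.run_migrations()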
revision = '1b8ed3425af9' @@ -36,70 +28,10 @@ depends_on = None -def json_serializer(obj): - """JSON serializer for objects not serializable by default json code""" - from datetime import date, datetime - from uuid import UUID - - if isinstance(obj, UUID): - return str(obj) - - if isinstance(obj, (datetime, date)): - return obj.isoformat() - - raise TypeError(f'Type {type(obj)} not serializable') - - -def export_workflow_data(connection): - """Export existing legacy workflow data to a JSON file.""" - from tempfile import NamedTemporaryFile - - DbWorkflow = table('db_dbworkflow') - DbWorkflowData = table('db_dbworkflowdata') - DbWorkflowStep = table('db_dbworkflowstep') - - count_workflow = connection.execute(select(func.count()).select_from(DbWorkflow)).scalar() - count_workflow_data = connection.execute(select(func.count()).select_from(DbWorkflowData)).scalar() - count_workflow_step = connection.execute(select(func.count()).select_from(DbWorkflowStep)).scalar() - - # Nothing to do if all tables are empty - if count_workflow == 0 and count_workflow_data == 0 and count_workflow_step == 0: - return - - if not configuration.PROFILE.is_test_profile: - echo.echo('\n') - echo.echo_warning('The legacy workflow tables contain data but will have to be dropped to continue.') - echo.echo_warning('If you continue, the content will be dumped to a JSON file, before dropping the tables.') - echo.echo_warning('This serves merely as a reference and cannot be used to restore the database.') - echo.echo_warning('If you want a proper backup, make sure to dump the full database and backup your repository') - if not click.confirm('Are you sure you want to continue', default=True): - sys.exit(1) - - delete_on_close = configuration.PROFILE.is_test_profile - - data = { - 'workflow': [dict(row) for row in connection.execute(select('*').select_from(DbWorkflow))], - 'workflow_data': [dict(row) for row in connection.execute(select('*').select_from(DbWorkflowData))], - 'workflow_step': [dict(row) for row in connection.execute(select('*').select_from(DbWorkflowStep))], - } - - with NamedTemporaryFile( - prefix='legacy-workflows', suffix='.json', dir='.', delete=delete_on_close, mode='w+' - ) as handle: - filename = handle.name - json.dump(data, handle, default=json_serializer) - - # If delete_on_close is False, we are running for the user and add additional message of file location - if not delete_on_close: - echo.echo_report(f'Exported workflow data to {filename}') - - def upgrade(): """Migrations for the upgrade.""" - connection = op.get_bind() - # Clean data - export_workflow_data(connection) + export_workflow_data(op.get_bind(), op.get_context().opts['aiida_profile']) op.drop_table('db_dbworkflowstep_sub_workflows') op.drop_table('db_dbworkflowstep_calculations') @@ -113,84 +45,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - op.create_table( - 'db_dbworkflow', - sa.Column( - 'id', - sa.INTEGER(), - server_default=sa.text("nextval('db_dbworkflow_id_seq'::regclass)"), - autoincrement=True, - nullable=False - ), - sa.Column('uuid', postgresql.UUID(), autoincrement=False, nullable=True), - sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('label', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('description', sa.TEXT(), autoincrement=False, 
nullable=True), - sa.Column('nodeversion', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('lastsyncedversion', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('report', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('module', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('module_class', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('script_path', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('script_md5', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'], name='db_dbworkflow_user_id_fkey'), - sa.PrimaryKeyConstraint('id', name='db_dbworkflow_pkey'), - sa.UniqueConstraint('uuid', name='db_dbworkflow_uuid_key'), - postgresql_ignore_search_path=False - ) - op.create_index('ix_db_dbworkflow_label', 'db_dbworkflow', ['label'], unique=False) - op.create_table( - 'db_dbworkflowdata', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('data_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('value_type', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('json_value', sa.TEXT(), autoincrement=False, nullable=True), - sa.Column('aiida_obj_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['aiida_obj_id'], ['db_dbnode.id'], name='db_dbworkflowdata_aiida_obj_id_fkey'), - sa.ForeignKeyConstraint(['parent_id'], ['db_dbworkflow.id'], name='db_dbworkflowdata_parent_id_fkey'), - sa.PrimaryKeyConstraint('id', name='db_dbworkflowdata_pkey'), - sa.UniqueConstraint('parent_id', 'name', 'data_type', name='db_dbworkflowdata_parent_id_name_data_type_key') - ) - op.create_index('ix_db_dbworkflowdata_parent_id', 'db_dbworkflowdata', ['parent_id'], unique=False) - op.create_index('ix_db_dbworkflowdata_aiida_obj_id', 'db_dbworkflowdata', ['aiida_obj_id'], unique=False) - op.create_table( - 'db_dbworkflowstep', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('time', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('nextcall', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column('state', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['parent_id'], ['db_dbworkflow.id'], name='db_dbworkflowstep_parent_id_fkey'), - sa.ForeignKeyConstraint(['user_id'], ['db_dbuser.id'], name='db_dbworkflowstep_user_id_fkey'), - sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_pkey'), - sa.UniqueConstraint('parent_id', 'name', name='db_dbworkflowstep_parent_id_name_key') - ) - op.create_table( - 'db_dbworkflowstep_calculations', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('dbworkflowstep_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['dbnode_id'], 
['db_dbnode.id'], name='db_dbworkflowstep_calculations_dbnode_id_fkey'), - sa.ForeignKeyConstraint(['dbworkflowstep_id'], ['db_dbworkflowstep.id'], - name='db_dbworkflowstep_calculations_dbworkflowstep_id_fkey'), - sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_calculations_pkey'), - sa.UniqueConstraint('dbworkflowstep_id', 'dbnode_id', name='db_dbworkflowstep_calculations_id_dbnode_id_key') - ) - op.create_table( - 'db_dbworkflowstep_sub_workflows', sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('dbworkflowstep_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('dbworkflow_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['dbworkflow_id'], ['db_dbworkflow.id'], - name='db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey'), - sa.ForeignKeyConstraint(['dbworkflowstep_id'], ['db_dbworkflowstep.id'], - name='db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey'), - sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_sub_workflows_pkey'), - sa.UniqueConstraint( - 'dbworkflowstep_id', 'dbworkflow_id', name='db_dbworkflowstep_sub_workflows_id_dbworkflow__key' - ) - ) + raise NotImplementedError('Removal of legacy workflows is not reversible.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/1de112340b16_django_parity_1.py b/aiida/backends/sqlalchemy/migrations/versions/1de112340b16_django_parity_1.py index 97c50d7f73..b2ea6f2791 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1de112340b16_django_parity_1.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1de112340b16_django_parity_1.py @@ -168,4 +168,4 @@ def upgrade(): # pylint: disable=too-many-statements def downgrade(): """Downgrade database schema.""" - # No need to convert the values back to null + raise NotImplementedError('Downgrade of 1de112340b16.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/1de112340b18_django_parity_3.py b/aiida/backends/sqlalchemy/migrations/versions/1de112340b18_django_parity_3.py index b6caf6531f..4b661b5db5 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1de112340b18_django_parity_3.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1de112340b18_django_parity_3.py @@ -18,205 +18,19 @@ """ from alembic import op -# revision identifiers, used by Alembic. 
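The django-parity migration whose diff continues below replaces some two hundred lines of index and constraint bookkeeping with a single `synchronize_schemas(op)` call. A condensed sketch of what that helper plausibly does, with the constant tables truncated to one entry each (the full lists live in `aiida.backends.sqlalchemy.migrations.utils.parity`):

# Condensed sketch; the real constants cover many more tables and indexes.
DROP_UNIQUE_CONSTRAINTS = (('db_dbsetting', ('key',), 'db_dbsetting_key_key'),)
ADD_UNIQUE_CONSTRAINTS = (('db_dbsetting', ('key',), 'db_dbsetting_key_1b84beb4_uniq'),)


def synchronize_schemas(alembic_op):
    """Rename constraints and indexes so the SQLAlchemy schema matches the Django one."""
    for table_name, _, constraint_name in DROP_UNIQUE_CONSTRAINTS:
        alembic_op.drop_constraint(constraint_name, table_name)
    for table_name, columns, constraint_name in ADD_UNIQUE_CONSTRAINTS:
        alembic_op.create_unique_constraint(constraint_name, table_name, list(columns))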
+from aiida.backends.sqlalchemy.migrations.utils.parity import synchronize_schemas + revision = '1de112340b18' down_revision = '1de112340b17' branch_labels = None depends_on = None -# table name, column name, index name -MISSING_STANDARD_INDEXES = ( - ('db_dbauthinfo', ('aiidauser_id',), False, 'db_dbauthinfo_aiidauser_id_0684fdfb'), - ('db_dbauthinfo', ('dbcomputer_id',), False, 'db_dbauthinfo_dbcomputer_id_424f7ac4'), - ('db_dbcomment', ('dbnode_id',), False, 'db_dbcomment_dbnode_id_3b812b6b'), - ('db_dbcomment', ('user_id',), False, 'db_dbcomment_user_id_8ed5e360'), - ('db_dbgroup', ('user_id',), False, 'db_dbgroup_user_id_100f8a51'), - ('db_dblog', ('dbnode_id',), False, 'db_dblog_dbnode_id_da34b732'), - ('db_dbnode', ('ctime',), False, 'db_dbnode_ctime_71626ef5'), - ('db_dbnode', ('mtime',), False, 'db_dbnode_mtime_0554ea3d'), - ('db_dbnode', ('dbcomputer_id',), False, 'db_dbnode_dbcomputer_id_315372a3'), - ('db_dbnode', ('user_id',), False, 'db_dbnode_user_id_12e7aeaf'), -) - -# table name, column name, index name -MISSING_VARCHAR_INDEXES = ( - ('db_dbcomputer', 'label', 'db_dbcomputer_label_bc480bab_like'), - ('db_dbgroup', 'label', 'db_dbgroup_name_66c75272_like'), - ('db_dbgroup', 'type_string', 'db_dbgroup_type_23b2a748_like'), - ('db_dblink', 'label', 'db_dblink_label_f1343cfb_like'), - ('db_dblink', 'type', 'db_dblink_type_229f212b_like'), - ('db_dblog', 'levelname', 'db_dblog_levelname_ad5dc346_like'), - ('db_dblog', 'loggername', 'db_dblog_loggername_00b5ba16_like'), - ('db_dbnode', 'label', 'db_dbnode_label_6469539e_like'), - ('db_dbnode', 'node_type', 'db_dbnode_type_a8ce9753_like'), - ('db_dbnode', 'process_type', 'db_dbnode_process_type_df7298d0_like'), - ('db_dbsetting', 'key', 'db_dbsetting_key_1b84beb4_like'), - ('db_dbuser', 'email', 'db_dbuser_email_30150b7e_like'), -) - -# table name, column names, constraint name -DROP_UNIQUE_CONSTRAINTS = ( - ('db_dbauthinfo', ('aiidauser_id', 'dbcomputer_id'), 'db_dbauthinfo_aiidauser_id_dbcomputer_id_key'), - ('db_dbcomment', ('uuid',), 'db_dbcomment_uuid_key'), - ('db_dbcomputer', ('label',), 'db_dbcomputer_label_key'), - ('db_dbcomputer', ('uuid',), 'db_dbcomputer_uuid_key'), - ('db_dbgroup', ('label', 'type_string'), 'db_dbgroup_label_type_string_key'), - ('db_dbgroup_dbnodes', ('dbgroup_id', 'dbnode_id'), 'db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key'), - ('db_dbgroup', ('uuid',), 'db_dbgroup_uuid_key'), - ('db_dblog', ('uuid',), 'db_dblog_uuid_key'), - ('db_dbnode', ('uuid',), 'db_dbnode_uuid_key'), - ('db_dbsetting', ('key',), 'db_dbsetting_key_key'), -) - -# table name, column names, constraint name -ADD_UNIQUE_CONSTRAINTS = ( - ('db_dbauthinfo', ('aiidauser_id', 'dbcomputer_id'), 'db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq'), - ('db_dbcomment', ('uuid',), 'db_dbcomment_uuid_49bac08c_uniq'), - ('db_dbcomputer', ('label',), 'db_dbcomputer_label_bc480bab_uniq'), - ('db_dbcomputer', ('uuid',), 'db_dbcomputer_uuid_f35defa6_uniq'), - ('db_dbgroup', ('label', 'type_string'), 'db_dbgroup_name_type_12656f33_uniq'), - ('db_dbgroup', ('uuid',), 'db_dbgroup_uuid_af896177_uniq'), - ('db_dbgroup_dbnodes', ('dbgroup_id', 'dbnode_id'), 'db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq'), - ('db_dblog', ('uuid',), 'db_dblog_uuid_9cf77df3_uniq'), - ('db_dbnode', ('uuid',), 'db_dbnode_uuid_62e0bf98_uniq'), - ('db_dbuser', ('email',), 'db_dbuser_email_30150b7e_uniq'), - ('db_dbsetting', ('key',), 'db_dbsetting_key_1b84beb4_uniq'), -) - -# table name, column names, unique, old name, new name -RENAMED_INDEXES = ( - 
('db_dbgroup', ('label',), False, 'ix_db_dbgroup_label', 'db_dbgroup_name_66c75272'), - ('db_dbgroup', ('type_string',), False, 'ix_db_dbgroup_type_string', 'db_dbgroup_type_23b2a748'), - ( - 'db_dbgroup_dbnodes', ('dbgroup_id',), False, 'db_dbgroup_dbnodes_dbgroup_id_idx', - 'db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d' - ), - ( - 'db_dbgroup_dbnodes', ('dbnode_id',), False, 'db_dbgroup_dbnodes_dbnode_id_idx', - 'db_dbgroup_dbnodes_dbnode_id_118b9439' - ), - ('db_dblink', ('input_id',), False, 'ix_db_dblink_input_id', 'db_dblink_input_id_9245bd73'), - ('db_dblink', ('label',), False, 'ix_db_dblink_label', 'db_dblink_label_f1343cfb'), - ('db_dblink', ('output_id',), False, 'ix_db_dblink_output_id', 'db_dblink_output_id_c0167528'), - ('db_dblink', ('type',), False, 'ix_db_dblink_type', 'db_dblink_type_229f212b'), - ('db_dblog', ('levelname',), False, 'ix_db_dblog_levelname', 'db_dblog_levelname_ad5dc346'), - ('db_dblog', ('loggername',), False, 'ix_db_dblog_loggername', 'db_dblog_loggername_00b5ba16'), - ('db_dbnode', ('label',), False, 'ix_db_dbnode_label', 'db_dbnode_label_6469539e'), - ('db_dbnode', ('node_type',), False, 'ix_db_dbnode_node_type', 'db_dbnode_type_a8ce9753'), - ('db_dbnode', ('process_type',), False, 'ix_db_dbnode_process_type', 'db_dbnode_process_type_df7298d0'), -) - -# table name, column names, unique, name -DROP_INDEXES = ( - ('db_dbsetting', ('key',), True, 'ix_db_dbsetting_key'), - ('db_dbuser', ('email',), True, 'ix_db_dbuser_email'), -) - def upgrade(): - """Add indexes.""" - # drop unique constraints - for tbl_name, _, con_name in DROP_UNIQUE_CONSTRAINTS: - op.drop_constraint( - con_name, - tbl_name, - ) - # drop indexes - for tbl_name, _, _, con_name in DROP_INDEXES: - op.drop_index( - con_name, - table_name=tbl_name, - ) - # Add missing standard indexes - for tbl_name, col_names, unique, key_name in MISSING_STANDARD_INDEXES: - op.create_index( - key_name, - table_name=tbl_name, - columns=col_names, - unique=unique, - ) - - # Add missing PostgreSQL-specific indexes for strings - # these improve perform for filtering on string regexes - for tbl_name, col_name, key_name in MISSING_VARCHAR_INDEXES: - op.create_index( - key_name, - tbl_name, - [col_name], - unique=False, - postgresql_using='btree', - postgresql_ops={col_name: 'varchar_pattern_ops'}, - ) - # rename indexes - for tbl_name, columns, unique, old_col_name, new_col_name in RENAMED_INDEXES: - op.drop_index( - old_col_name, - table_name=tbl_name, - ) - op.create_index( - new_col_name, - tbl_name, - columns, - unique=unique, - ) - # add unique constraints - for tbl_name, columns, con_name in ADD_UNIQUE_CONSTRAINTS: - op.create_unique_constraint( - con_name, - tbl_name, - columns, - ) + """Migrations for the upgrade.""" + synchronize_schemas(op) def downgrade(): - """Remove indexes.""" - # drop unique constraints - for tbl_name, _, con_name in ADD_UNIQUE_CONSTRAINTS: - op.drop_constraint( - con_name, - tbl_name, - ) - # Drop missing standard indexes - for tbl_name, _, _, key_name in MISSING_STANDARD_INDEXES: - op.drop_index( - key_name, - table_name=tbl_name, - ) - - # Drop missing postgresql-specific indexes - for tbl_name, col_name, key_name in MISSING_VARCHAR_INDEXES: - op.drop_index( - key_name, - table_name=tbl_name, - postgresql_using='btree', - postgresql_ops={col_name: 'varchar_pattern_ops'}, - ) - # drop renamed indexes - for tbl_name, _, _, _, new_col_name in RENAMED_INDEXES: - op.drop_index( - new_col_name, - table_name=tbl_name, - ) - # add renamed indexes - for tbl_name, columns, unique, old_col_name, 
_ in RENAMED_INDEXES: - op.create_index( - old_col_name, - tbl_name, - columns, - unique=unique, - ) - # add indexes - for tbl_name, columns, unique, con_name in DROP_INDEXES: - op.create_index( - con_name, - tbl_name, - columns, - unique=unique, - ) - # add unique constraints - for tbl_name, columns, con_name in DROP_UNIQUE_CONSTRAINTS: - op.create_unique_constraint( - con_name, - tbl_name, - columns, - ) + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 1de112340b18.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/1feaea71bd5a_migrate_repository.py b/aiida/backends/sqlalchemy/migrations/versions/1feaea71bd5a_migrate_repository.py index d7cdfb10fe..d0121864d6 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1feaea71bd5a_migrate_repository.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1feaea71bd5a_migrate_repository.py @@ -1,4 +1,12 @@ # -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### # pylint: disable=invalid-name,no-member """Migrate the file repository to the new disk object store based implementation. @@ -7,15 +15,7 @@ Create Date: 2020-10-01 15:05:49.271958 """ -import pathlib - from alembic import op -from sqlalchemy import Integer, cast -from sqlalchemy.dialects.postgresql import JSONB, UUID -from sqlalchemy.sql import column, func, select, table, text - -from aiida.backends.general.migrations import utils -from aiida.cmdline.utils import echo # revision identifiers, used by Alembic. revision = '1feaea71bd5a' @@ -26,116 +26,11 @@ def upgrade(): """Migrations for the upgrade.""" - # pylint: disable=too-many-locals,too-many-branches,too-many-statements - import json - from tempfile import NamedTemporaryFile - - from disk_objectstore import Container - - from aiida.common import exceptions - from aiida.common.progress_reporter import get_progress_reporter, set_progress_bar_tqdm, set_progress_reporter - from aiida.manage.configuration import get_profile - from aiida.manage.manager import get_manager - - connection = op.get_bind() - - DbNode = table( - 'db_dbnode', - column('id', Integer), - column('uuid', UUID), - column('repository_metadata', JSONB), - ) - - profile = get_profile() - backend = get_manager().get_backend() - node_count = connection.execute(select(func.count()).select_from(DbNode)).scalar() - missing_repo_folder = [] - shard_count = 256 - - basepath = pathlib.Path(profile.repository_path) / 'repository' / 'node' - filepath = pathlib.Path(profile.repository_path) / 'container' - container = Container(filepath) - - if not profile.is_test_profile and (node_count > 0 and not basepath.is_dir()): - raise exceptions.DatabaseMigrationError( - f'the file repository `{basepath}` does not exist but the database is not empty, it contains {node_count} ' - 'nodes. Aborting the migration.' - ) - - if not profile.is_test_profile and container.is_initialised: - raise exceptions.DatabaseMigrationError( - f'the container {filepath} already exists. If you ran this migration before and it failed simply ' - 'delete this directory and restart the migration.' 
- ) - - container.init_container(clear=True, **profile.defaults['repository']) - - # Only show the progress bar if there is at least a node in the database. Note that we cannot simply make the entire - # next block under the context manager optional, since it performs checks on whether the repository contains files - # that are not in the database that are still important to perform even if the database is empty. - if node_count > 0: - set_progress_bar_tqdm() - else: - set_progress_reporter(None) - - with get_progress_reporter()(total=shard_count, desc='Migrating file repository') as progress: - for i in range(shard_count): - - shard = '%.2x' % i # noqa flynt - progress.set_description_str(f'Migrating file repository: shard {shard}') - - mapping_node_repository_metadata, missing_sub_repo_folder = utils.migrate_legacy_repository(shard) - - if missing_sub_repo_folder: - missing_repo_folder.extend(missing_sub_repo_folder) - del missing_sub_repo_folder - - if mapping_node_repository_metadata is None: - continue - - for node_uuid, repository_metadata in mapping_node_repository_metadata.items(): - - # If `repository_metadata` is `{}` or `None`, we skip it, as we can leave the column default `null`. - if not repository_metadata: - continue - - value = cast(repository_metadata, JSONB) - connection.execute(DbNode.update().where(DbNode.c.uuid == node_uuid).values(repository_metadata=value)) - - del mapping_node_repository_metadata - progress.update() - - # Store the UUID of the repository container in the `DbSetting` table. Note that for new databases, the profile - # setup will already have stored the UUID and so it should be skipped, or an exception for a duplicate key will be - # raised. This migration step is only necessary for existing databases that are migrated. - container_id = backend.get_repository().uuid - statement = text( - f""" - INSERT INTO db_dbsetting (key, val, description, time) - VALUES ('repository|uuid', to_json('{container_id}'::text), 'Repository UUID', NOW()) - ON CONFLICT (key) DO NOTHING; - """ - ) - connection.execute(statement) - - if not profile.is_test_profile: - - if missing_repo_folder: - prefix = 'migration-repository-missing-subfolder-' - with NamedTemporaryFile(prefix=prefix, suffix='.json', dir='.', mode='w+', delete=False) as handle: - json.dump(missing_repo_folder, handle) - echo.echo_warning( - 'Detected repository folders that were missing the required subfolder `path` or `raw_input`. ' - f'The paths of those nodes repository folders have been written to a log file: {handle.name}' - ) + from aiida.backends.sqlalchemy.migrations.utils.migrate_repository import migrate_repository - # If there were no nodes, most likely a new profile, there is not need to print the warning - if node_count: - echo.echo_warning( - 'Migrated file repository to the new disk object store. The old repository has not been deleted out' - f' of safety and can be found at {pathlib.Path(get_profile().repository_path, "repository")}.' 
- ) + migrate_repository(op.get_bind(), op.get_context().opts['aiida_profile']) def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Migration of the file repository is not reversible.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/239cea6d2452_provenance_redesign.py b/aiida/backends/sqlalchemy/migrations/versions/239cea6d2452_provenance_redesign.py index 7e0d35064f..48b2c0db9a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/239cea6d2452_provenance_redesign.py +++ b/aiida/backends/sqlalchemy/migrations/versions/239cea6d2452_provenance_redesign.py @@ -15,13 +15,7 @@ Create Date: 2018-12-04 21:14:15.250247 """ - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error from alembic import op -from sqlalchemy import Integer, String -from sqlalchemy.dialects.postgresql import UUID -from sqlalchemy.sql import column, select, table, text # revision identifiers, used by Alembic. revision = '239cea6d2452' @@ -30,77 +24,17 @@ depends_on = None -def migrate_infer_calculation_entry_point(connection): - """Set the process type for calculation nodes by inferring it from their type string.""" - from aiida.manage.database.integrity import write_database_integrity_violation - from aiida.manage.database.integrity.plugins import infer_calculation_entry_point - from aiida.plugins.entry_point import ENTRY_POINT_STRING_SEPARATOR - - DbNode = table( - 'db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String), - column('process_type', String) - ) - - query_set = connection.execute(select(DbNode.c.type).where(DbNode.c.type.like('calculation.%'))).fetchall() - type_strings = set(entry[0] for entry in query_set) - mapping_node_type_to_entry_point = infer_calculation_entry_point(type_strings=type_strings) - - fallback_cases = [] - - for type_string, entry_point_string in mapping_node_type_to_entry_point.items(): - - # If the entry point string does not contain the entry point string separator, the mapping function was not able - # to map the type string onto a known entry point string. As a fallback it uses the modified type string itself. - # All affected entries should be logged to file that the user can consult. - if ENTRY_POINT_STRING_SEPARATOR not in entry_point_string: - query_set = connection.execute( - select(DbNode.c.uuid).where(DbNode.c.type == op.inline_literal(type_string)) - ).fetchall() - - uuids = [str(entry.uuid) for entry in query_set] - for uuid in uuids: - fallback_cases.append([uuid, type_string, entry_point_string]) - - connection.execute( - DbNode.update().where(DbNode.c.type == op.inline_literal(type_string) - ).values(process_type=op.inline_literal(entry_point_string)) - ) - - if fallback_cases: - headers = ['UUID', 'type (old)', 'process_type (fallback)'] - warning_message = 'found calculation nodes with a type string that could not be mapped onto a known entry point' - action_message = 'inferred `process_type` for all calculation nodes, using fallback for unknown entry points' - write_database_integrity_violation(fallback_cases, headers, warning_message, action_message) - - -def detect_unexpected_links(connection): - """Scan the database for any links that are unexpected. 
-
-    The checks will verify that there are no outgoing `call` or `return` links from calculation nodes and that if a
-    workflow node has a `create` link, it has at least an accompanying return link to the same data node, or it has a
-    `call` link to a calculation node that takes the created data node as input.
-    """
-    from aiida.backends.general.migrations.provenance_redesign import INVALID_LINK_SELECT_STATEMENTS
-    from aiida.manage.database.integrity import write_database_integrity_violation
-
-    for sql, warning_message in INVALID_LINK_SELECT_STATEMENTS:
-        results = list(connection.execute(text(sql)))
-        if results:
-            headers = ['UUID source', 'UUID target', 'link type', 'link label']
-            write_database_integrity_violation(results, headers, warning_message)
-
-
 def upgrade():
-    """The upgrade migration actions."""
-    connection = op.get_bind()
+    """Migrations for the upgrade."""
+    from aiida.backends.sqlalchemy.migrations.utils import provenance_redesign

     # Migrate calculation nodes by inferring the process type from the type string
-    migrate_infer_calculation_entry_point(connection)
+    provenance_redesign.migrate_infer_calculation_entry_point(op)

     # Detect if the database contain any unexpected links
-    detect_unexpected_links(connection)
+    provenance_redesign.detect_unexpected_links(op)

-    statement = text(
+    op.execute(
         """
         DELETE FROM db_dblink WHERE db_dblink.id IN (
             SELECT db_dblink.id FROM db_dblink
@@ -172,39 +106,8 @@ def upgrade():
         -- Rename `calllink` to `call_work` if the target node is a workflow type node
         """
     )
-    connection.execute(statement)


 def downgrade():
-    """The downgrade migration actions."""
-    connection = op.get_bind()
-
-    statement = text(
-        """
-        UPDATE db_dbnode SET type = 'calculation.job.JobCalculation.'
-        WHERE type = 'node.process.calculation.calcjob.CalcJobNode.';
-
-        UPDATE db_dbnode SET type = 'calculation.inline.InlineCalculation.'
-        WHERE type = 'node.process.calculation.calcfunction.CalcFunctionNode.';
-
-        UPDATE db_dbnode SET type = 'calculation.function.FunctionCalculation.'
-        WHERE type = 'node.process.workflow.workfunction.WorkFunctionNode.';
-
-        UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.'
-        WHERE type = 'node.process.workflow.workchain.WorkChainNode.';
-
-
-        UPDATE db_dblink SET type = 'inputlink'
-        WHERE type = 'input_call' OR type = 'input_work';
-
-        UPDATE db_dblink SET type = 'calllink'
-        WHERE type = 'call_call' OR type = 'call_work';
-
-        UPDATE db_dblink SET type = 'createlink'
-        WHERE type = 'create';
-
-        UPDATE db_dblink SET type = 'returnlink'
-        WHERE type = 'return';
-        """
-    )
-    connection.execute(statement)
+    """Migrations for the downgrade."""
+    raise NotImplementedError('Downgrade of 239cea6d2452.')
diff --git a/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py b/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py
index af91d0e34c..c5e36bbdd9 100644
--- a/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py
+++ b/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py
@@ -8,7 +8,7 @@
 # For further information please visit http://www.aiida.net #
 ###########################################################################
 # pylint: disable=invalid-name,no-member
-"""Data migration for legacy `JobCalculations`.
+"""Migrate legacy `JobCalculations`.
These old nodes have already been migrated to the correct `CalcJobNode` type in a previous migration, but they can still contain a `state` attribute with a deprecated `JobCalcState` value and they are missing a value for the @@ -40,6 +40,8 @@ Note: in addition to the three attributes mentioned in the table, all matched nodes will get `Legacy JobCalculation` as their `process_label` which is one of the default columns of `verdi process list`. +This migration is identical to django_0038 + Revision ID: 26d561acd560 Revises: 07fac78e6209 Create Date: 2019-06-22 09:55:25.284168 @@ -110,3 +112,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 26d561acd560.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/34a831f4286d_entry_point_core_prefix.py b/aiida/backends/sqlalchemy/migrations/versions/34a831f4286d_entry_point_core_prefix.py index d6484a0883..bb9d27d632 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/34a831f4286d_entry_point_core_prefix.py +++ b/aiida/backends/sqlalchemy/migrations/versions/34a831f4286d_entry_point_core_prefix.py @@ -1,4 +1,12 @@ # -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### # pylint: disable=invalid-name,no-member,line-too-long """Update node types after `core.` prefix was added to entry point names. @@ -19,7 +27,7 @@ def upgrade(): """Migrations for the upgrade.""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() statement = text( """ UPDATE db_dbnode SET node_type = 'data.core.array.ArrayData.' WHERE node_type = 'data.array.ArrayData.'; @@ -67,3 +75,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 34a831f4286d.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/35d4ee9a1b0e_code_hidden_attr_to_extra.py b/aiida/backends/sqlalchemy/migrations/versions/35d4ee9a1b0e_code_hidden_attr_to_extra.py index 8d417a4ffc..8af5134df9 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/35d4ee9a1b0e_code_hidden_attr_to_extra.py +++ b/aiida/backends/sqlalchemy/migrations/versions/35d4ee9a1b0e_code_hidden_attr_to_extra.py @@ -54,26 +54,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() - - # Set hidden=True in attributes if the extras contain hidden=True - statement = text( - """ - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"hidden"}', to_jsonb(True)) - WHERE type = 'code.Code.' AND extras @> '{"hidden": true}' - """ - ) - conn.execute(statement) - - # Set hidden=False in attributes if the extras contain hidden=False - statement = text( - """ - UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"hidden"}', to_jsonb(False)) - WHERE type = 'code.Code.' 
AND extras @> '{"hidden": false}' - """ - ) - conn.execute(statement) - - # Delete the hidden key from the extras - statement = text("""UPDATE db_dbnode SET extras = extras-'hidden' WHERE type = 'code.Code.'""") - conn.execute(statement) + raise NotImplementedError('Downgrade of 35d4ee9a1b0e.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/375c2db70663_dblog_uuid_uniqueness_constraint.py b/aiida/backends/sqlalchemy/migrations/versions/375c2db70663_dblog_uuid_uniqueness_constraint.py index 73ccd2b232..ee8f18e24b 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/375c2db70663_dblog_uuid_uniqueness_constraint.py +++ b/aiida/backends/sqlalchemy/migrations/versions/375c2db70663_dblog_uuid_uniqueness_constraint.py @@ -27,10 +27,10 @@ def upgrade(): - """Add unique key constraint to the UUID column.""" + """Migrations for the upgrade.""" op.create_unique_constraint('db_dblog_uuid_key', 'db_dblog', ['uuid']) def downgrade(): - """Remove unique key constraint to the UUID column.""" + """Migrations for the downgrade.""" op.drop_constraint('db_dblog_uuid_key', 'db_dblog') diff --git a/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py b/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py index 0d639e63d3..9a9a4a56af 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py +++ b/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py @@ -31,41 +31,14 @@ tables = ['db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbworkflow'] -def verify_uuid_uniqueness(table): - """Check whether the database contains duplicate UUIDS. - - Note that we have to redefine this method from - aiida.backends.general.migrations.duplicate_uuids.verify_uuid_uniqueness - because that uses the default database connection, while here the one created by Alembic should be used instead. - - :raises: IntegrityError if database contains nodes with duplicate UUIDS. 
- """ - from sqlalchemy.sql import text - - from aiida.common.exceptions import IntegrityError - - query = text( - f'SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {table}) AS s WHERE c > 1' - ) - conn = op.get_bind() - duplicates = conn.execute(query).fetchall() - - if duplicates: - command = f'`verdi database integrity detect-duplicate-uuid {table}`' - raise IntegrityError( - 'Your table "{}"" contains entries with duplicate UUIDS.\nRun {} ' - 'to return to a consistent state'.format(table, command) - ) - - def upgrade(): - + """Migrations for the upgrade.""" + from aiida.backends.sqlalchemy.migrations.utils.duplicate_uuids import verify_uuid_uniqueness for table in tables: - verify_uuid_uniqueness(table) + verify_uuid_uniqueness(table, op.get_bind()) op.create_unique_constraint(f'{table}_uuid_key', table, ['uuid']) def downgrade(): - - for table in tables: - op.drop_constraint(f'{table}_uuid_key', table) + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 37f3d4882837.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/3d6190594e19_remove_dbcomputer_enabled.py b/aiida/backends/sqlalchemy/migrations/versions/3d6190594e19_remove_dbcomputer_enabled.py index a0ce3fdda4..da60862636 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/3d6190594e19_remove_dbcomputer_enabled.py +++ b/aiida/backends/sqlalchemy/migrations/versions/3d6190594e19_remove_dbcomputer_enabled.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name -"""Remove `DbComputer.enabled` +"""Remove `db_dbcomputer.enabled` + +This is similar to migration django_0031 Revision ID: 3d6190594e19 Revises: 5a49629f0d45 diff --git a/aiida/backends/sqlalchemy/migrations/versions/535039300e4a_computer_name_to_label.py b/aiida/backends/sqlalchemy/migrations/versions/535039300e4a_computer_name_to_label.py index 1799a3ed41..7eba581d70 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/535039300e4a_computer_name_to_label.py +++ b/aiida/backends/sqlalchemy/migrations/versions/535039300e4a_computer_name_to_label.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # pylint: disable=invalid-name,no-member -"""Rename the ``name`` column of the ``Computer`` entity to ``label``. 
+"""Rename `db_dbcomputer.name` to `db_dbcomputer.label` Revision ID: 535039300e4a Revises: 1feaea71bd5a @@ -25,6 +25,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - op.drop_constraint('db_dbcomputer_label_key', 'db_dbcomputer') - op.alter_column('db_dbcomputer', 'label', new_column_name='name') # pylint: disable=no-member - op.create_unique_constraint('db_dbcomputer_name_key', 'db_dbcomputer', ['name']) + raise NotImplementedError('Downgrade of 535039300e4a.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/59edaf8a8b79_adding_indexes_and_constraints_to_the_.py b/aiida/backends/sqlalchemy/migrations/versions/59edaf8a8b79_adding_indexes_and_constraints_to_the_.py index 9f47898db8..b1233583a0 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/59edaf8a8b79_adding_indexes_and_constraints_to_the_.py +++ b/aiida/backends/sqlalchemy/migrations/versions/59edaf8a8b79_adding_indexes_and_constraints_to_the_.py @@ -43,11 +43,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - op.drop_index('db_dbgroup_dbnodes_dbnode_id_idx', 'db_dbgroup_dbnodes') - op.drop_index('db_dbgroup_dbnodes_dbgroup_id_idx', 'db_dbgroup_dbnodes') - op.drop_constraint('db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key', 'db_dbgroup_dbnodes') - # Creating the constraint uix_dbnode_id_dbgroup_id that migration - # 7a6587e16f4c would add - op.create_unique_constraint( - 'db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key', 'db_dbgroup_dbnodes', ['dbgroup_id', 'dbnode_id'] - ) + raise NotImplementedError('Downgrade of 59edaf8a8b79.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/5ddd24e52864_dbnode_type_to_dbnode_node_type.py b/aiida/backends/sqlalchemy/migrations/versions/5ddd24e52864_dbnode_type_to_dbnode_node_type.py index 9685949640..6d01f05a81 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/5ddd24e52864_dbnode_type_to_dbnode_node_type.py +++ b/aiida/backends/sqlalchemy/migrations/versions/5ddd24e52864_dbnode_type_to_dbnode_node_type.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Renaming `DbNode.type` to `DbNode.node_type` +"""Rename `db_dbnode.type` to `db_dbnode.node_type` + +This is identical to migration django_0029 Revision ID: 5ddd24e52864 Revises: d254fdfed416 diff --git a/aiida/backends/sqlalchemy/migrations/versions/61fc0913fae9_remove_node_prefix.py b/aiida/backends/sqlalchemy/migrations/versions/61fc0913fae9_remove_node_prefix.py index 4420d84cd6..b3bed0c1ad 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/61fc0913fae9_remove_node_prefix.py +++ b/aiida/backends/sqlalchemy/migrations/versions/61fc0913fae9_remove_node_prefix.py @@ -8,7 +8,11 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Final data migration for `Nodes` after `aiida.orm.nodes` reorganization was finalized to remove the `node.` prefix +"""Remove the `node.` prefix from `db_dbnode.type` + +Final data migration for `Nodes` after `aiida.orm.nodes` reorganization was finalized to remove the `node.` prefix + +Note, this is identical to the django_0027 migration. 
Revision ID: 61fc0913fae9 Revises: ce56d84bcc35 @@ -48,17 +52,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() - - statement = text( - r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^data.', 'node.data.') - WHERE type LIKE 'data.%'; - - UPDATE db_dbnode - SET type = regexp_replace(type, '^process.', 'node.process.') - WHERE type LIKE 'process.%'; - """ - ) - conn.execute(statement) + raise NotImplementedError('Downgrade of 61fc0913fae9.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py b/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py index 21aa8739cf..74b97a67a8 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py +++ b/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py @@ -24,36 +24,10 @@ depends_on = None -def verify_node_uuid_uniqueness(): - """Check whether the database contains nodes with duplicate UUIDS. - - Note that we have to redefine this method from aiida.manage.database.integrity.verify_node_uuid_uniqueness - because that uses the default database connection, while here the one created by Alembic should be used instead. - - :raises: IntegrityError if database contains nodes with duplicate UUIDS. - """ - from sqlalchemy.sql import text - - from aiida.common.exceptions import IntegrityError - - query = text( - 'SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM db_dbnode) AS s WHERE c > 1' - ) - conn = op.get_bind() - duplicates = conn.execute(query).fetchall() - - if duplicates: - table = 'db_dbnode' - command = f'`verdi database integrity detect-duplicate-uuid {table}`' - raise IntegrityError( - 'Your table "{}" contains entries with duplicate UUIDS.\nRun {} ' - 'to return to a consistent state'.format(table, command) - ) - - def upgrade(): """Migrations for the upgrade.""" - verify_node_uuid_uniqueness() + from aiida.backends.sqlalchemy.migrations.utils.duplicate_uuids import verify_uuid_uniqueness + verify_uuid_uniqueness('db_dbnode', op.get_bind()) op.create_unique_constraint('db_dbnode_uuid_key', 'db_dbnode', ['uuid']) diff --git a/aiida/backends/sqlalchemy/migrations/versions/6a5c2ea1439d_move_data_within_node_module.py b/aiida/backends/sqlalchemy/migrations/versions/6a5c2ea1439d_move_data_within_node_module.py index 86160b0e46..82243643af 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/6a5c2ea1439d_move_data_within_node_module.py +++ b/aiida/backends/sqlalchemy/migrations/versions/6a5c2ea1439d_move_data_within_node_module.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Data migration for `Data` nodes after it was moved in the `aiida.orm.node` module changing the type string. 
+"""Change type string for `Data` nodes, from `data.*` to `node.data.*` + +Note, this is identical to django_0025 Revision ID: 6a5c2ea1439d Revises: 375c2db70663 @@ -43,13 +45,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() - - statement = text( - r""" - UPDATE db_dbnode - SET type = regexp_replace(type, '^node.data.', 'data.') - WHERE type LIKE 'node.data.%' - """ - ) - conn.execute(statement) + raise NotImplementedError('Downgrade of 6a5c2ea1439d.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/70c7d732f1b2_delete_dbpath.py b/aiida/backends/sqlalchemy/migrations/versions/70c7d732f1b2_delete_dbpath.py index b037edf3f7..590ea1c531 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/70c7d732f1b2_delete_dbpath.py +++ b/aiida/backends/sqlalchemy/migrations/versions/70c7d732f1b2_delete_dbpath.py @@ -17,9 +17,6 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.orm.session import Session - -from aiida.backends.sqlalchemy.utils import install_tc # revision identifiers, used by Alembic. revision = '70c7d732f1b2' @@ -38,25 +35,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - op.create_table( - 'db_dbpath', sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('parent_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('child_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('depth', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('entry_edge_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('direct_edge_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('exit_edge_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['child_id'], ['db_dbnode.id'], - name='db_dbpath_child_id_fkey', - initially='DEFERRED', - deferrable=True), - sa.ForeignKeyConstraint(['parent_id'], ['db_dbnode.id'], - name='db_dbpath_parent_id_fkey', - initially='DEFERRED', - deferrable=True), sa.PrimaryKeyConstraint('id', name='db_dbpath_pkey') - ) - # I get the session using the alembic connection - # (Keep in mind that alembic uses the AiiDA SQLA - # session) - session = Session(bind=op.get_bind()) - install_tc(session) + raise NotImplementedError('Downgrade of 70c7d732f1b2.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/7536a82b2cc4_add_node_repository_metadata.py b/aiida/backends/sqlalchemy/migrations/versions/7536a82b2cc4_add_node_repository_metadata.py index a9dcc55679..cf37b0b6f1 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/7536a82b2cc4_add_node_repository_metadata.py +++ b/aiida/backends/sqlalchemy/migrations/versions/7536a82b2cc4_add_node_repository_metadata.py @@ -28,9 +28,11 @@ def upgrade(): """Migrations for the upgrade.""" - # We add the column with a `server_default` because otherwise the migration would fail since existing rows will not - # have a value and violate the not-nullable clause. 
- op.add_column('db_dbnode', sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + op.add_column( + 'db_dbnode', + sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False, server_default='{}') + ) + op.alter_column('db_dbnode', 'repository_metadata', server_default=None) def downgrade(): diff --git a/aiida/backends/sqlalchemy/migrations/versions/7b38a9e783e7_seal_unsealed_processes.py b/aiida/backends/sqlalchemy/migrations/versions/7b38a9e783e7_seal_unsealed_processes.py index 4efa91aa29..6ceef2552c 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/7b38a9e783e7_seal_unsealed_processes.py +++ b/aiida/backends/sqlalchemy/migrations/versions/7b38a9e783e7_seal_unsealed_processes.py @@ -19,6 +19,8 @@ case for legacy calculations like `InlineCalculation` nodes. Their node type was already migrated in `0020` but most of them will be unsealed. +This is identical to migration django_0041 + Revision ID: 7b38a9e783e7 Revises: e734dd5e50d7 Create Date: 2019-10-28 13:22:56.224234 @@ -61,3 +63,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 7b38a9e783e7.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/7ca08c391c49_calc_job_option_attribute_keys.py b/aiida/backends/sqlalchemy/migrations/versions/7ca08c391c49_calc_job_option_attribute_keys.py index 953111f23e..26819ca508 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/7ca08c391c49_calc_job_option_attribute_keys.py +++ b/aiida/backends/sqlalchemy/migrations/versions/7ca08c391c49_calc_job_option_attribute_keys.py @@ -95,4 +95,5 @@ def upgrade(): def downgrade(): - pass + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 7ca08c391c49.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/89176227b25_add_indexes_to_dbworkflowdata_table.py b/aiida/backends/sqlalchemy/migrations/versions/89176227b25_add_indexes_to_dbworkflowdata_table.py index f3f8087837..0cf4ab55d2 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/89176227b25_add_indexes_to_dbworkflowdata_table.py +++ b/aiida/backends/sqlalchemy/migrations/versions/89176227b25_add_indexes_to_dbworkflowdata_table.py @@ -25,10 +25,11 @@ def upgrade(): + """Migrations for the upgrade.""" op.create_index('ix_db_dbworkflowdata_aiida_obj_id', 'db_dbworkflowdata', ['aiida_obj_id']) op.create_index('ix_db_dbworkflowdata_parent_id', 'db_dbworkflowdata', ['parent_id']) def downgrade(): - op.drop_index('ix_db_dbworkflowdata_aiida_obj_id', 'db_dbworkflowdata') - op.drop_index('ix_db_dbworkflowdata_parent_id', 'db_dbworkflowdata') + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 89176227b25.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/91b573400be5_prepare_schema_reset.py b/aiida/backends/sqlalchemy/migrations/versions/91b573400be5_prepare_schema_reset.py index 88ec6ded94..b48f3429e5 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/91b573400be5_prepare_schema_reset.py +++ b/aiida/backends/sqlalchemy/migrations/versions/91b573400be5_prepare_schema_reset.py @@ -10,6 +10,8 @@ # pylint: disable=invalid-name,no-member """Prepare schema reset. 
+This is similar to migration django_0042 + Revision ID: 91b573400be5 Revises: 7b38a9e783e7 Create Date: 2019-07-25 14:58:39.866822 @@ -50,3 +52,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of 91b573400be5.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/a514d673c163_drop_dblock.py b/aiida/backends/sqlalchemy/migrations/versions/a514d673c163_drop_dblock.py index f539716b45..2a3d6e4f57 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/a514d673c163_drop_dblock.py +++ b/aiida/backends/sqlalchemy/migrations/versions/a514d673c163_drop_dblock.py @@ -16,8 +16,6 @@ """ from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = 'a514d673c163' @@ -27,14 +25,10 @@ def upgrade(): + """Migrations for the upgrade.""" op.drop_table('db_dblock') def downgrade(): - op.create_table( - 'db_dblock', sa.Column('key', sa.VARCHAR(length=255), autoincrement=False, nullable=False), - sa.Column('creation', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True), - sa.Column('timeout', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('owner', sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('key', name='db_dblock_pkey') - ) + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of a514d673c163.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/a6048f0ffca8_update_linktypes.py b/aiida/backends/sqlalchemy/migrations/versions/a6048f0ffca8_update_linktypes.py index 440d41cf20..0b55342c49 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/a6048f0ffca8_update_linktypes.py +++ b/aiida/backends/sqlalchemy/migrations/versions/a6048f0ffca8_update_linktypes.py @@ -151,4 +151,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - print('There is no downgrade for the link types') + raise NotImplementedError('Downgrade of a6048f0ffca8.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py index 48ae39eb1d..b2fc72b083 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py +++ b/aiida/backends/sqlalchemy/migrations/versions/b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py @@ -44,16 +44,4 @@ def upgrade(): def downgrade(): """The downgrade migration actions.""" - # dropping - op.drop_constraint('db_dbgroup_label_type_string_key', 'db_dbgroup') - op.drop_index('ix_db_dbgroup_label', 'db_dbgroup') - op.drop_index('ix_db_dbgroup_type_string', 'db_dbgroup') - - # renaming - op.alter_column('db_dbgroup', 'label', new_column_name='name') - op.alter_column('db_dbgroup', 'type_string', new_column_name='type') - - # creating - op.create_unique_constraint('db_dbgroup_name_type_key', 'db_dbgroup', ['name', 'type']) - op.create_index('ix_db_dbgroup_name', 'db_dbgroup', ['name']) - op.create_index('ix_db_dbgroup_type', 'db_dbgroup', ['type']) + raise NotImplementedError('Downgrade of b8b23ddefad4.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py index 6d71cd55f6..6f3cd63df1 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py +++ 
b/aiida/backends/sqlalchemy/migrations/versions/bf591f31dd12_dbgroup_type_string.py @@ -26,13 +26,6 @@ """UPDATE db_dbgroup SET type_string = 'core.auto' WHERE type_string = 'auto.run';""", ] -reverse_sql = [ - """UPDATE db_dbgroup SET type_string = 'user' WHERE type_string = 'core';""", - """UPDATE db_dbgroup SET type_string = 'data.upf' WHERE type_string = 'core.upf';""", - """UPDATE db_dbgroup SET type_string = 'auto.import' WHERE type_string = 'core.import';""", - """UPDATE db_dbgroup SET type_string = 'auto.run' WHERE type_string = 'core.auto';""", -] - # revision identifiers, used by Alembic. revision = 'bf591f31dd12' down_revision = '118349c10896' @@ -49,6 +42,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() - statement = text('\n'.join(reverse_sql)) - conn.execute(statement) + raise NotImplementedError('Downgrade of bf591f31dd12.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py b/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py index defce436ad..cc8f3c10ef 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py +++ b/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py @@ -10,21 +10,19 @@ # pylint: disable=invalid-name,no-member """Delete trajectory symbols array from the repository and the reference in the attributes +Note, this is similar to the django migration django_0026 + Revision ID: ce56d84bcc35 Revises: 12536798d4d3 Create Date: 2019-01-21 15:35:07.280805 """ -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-member,no-name-in-module,import-error - from alembic import op -import numpy -from sqlalchemy import Integer, String, cast +from sqlalchemy import Integer, String from sqlalchemy.dialects.postgresql import JSONB, UUID -from sqlalchemy.sql import column, func, select, table, text +from sqlalchemy.sql import column, select, table, text -from aiida.backends.general.migrations import utils +from aiida.backends.sqlalchemy.migrations.utils import utils # revision identifiers, used by Alembic. 
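In the hunk below the array helpers take the repository path as an explicit argument rather than resolving the global profile themselves, which keeps the migration usable with whatever profile the context supplies. A hedged sketch of such a helper; the uuid-sharded folder layout is an assumption for illustration, and the real implementation lives in `aiida.backends.sqlalchemy.migrations.utils.utils`:

# Sketch only: the sharded legacy-repository layout below is assumed.
import pathlib


def delete_numpy_array_from_repository(repository_path, uuid, name):
    """Delete array `name` from the legacy repository folder of node `uuid`, if present."""
    node_folder = pathlib.Path(repository_path) / 'repository' / 'node' / uuid[:2] / uuid[2:4] / uuid[4:]
    array_file = node_folder / 'path' / f'{name}.npy'
    if array_file.is_file():
        array_file.unlink()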
revision = 'ce56d84bcc35' @@ -35,38 +33,30 @@ def upgrade(): """Migrations for the upgrade.""" - # yapf:disable connection = op.get_bind() + profile = op.get_context().opts['aiida_profile'] + repo_path = profile.repository_path - DbNode = table('db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String), - column('attributes', JSONB)) + DbNode = table( + 'db_dbnode', + column('id', Integer), + column('uuid', UUID), + column('type', String), + column('attributes', JSONB), + ) nodes = connection.execute( - select(DbNode.c.id, DbNode.c.uuid).where( - DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.'))).fetchall() + select(DbNode.c.id, + DbNode.c.uuid).where(DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.')) + ).fetchall() for pk, uuid in nodes: connection.execute( - text(f"""UPDATE db_dbnode SET attributes = attributes #- '{{array|symbols}}' WHERE id = {pk}""")) - utils.delete_numpy_array_from_repository(uuid, 'symbols') + text(f"""UPDATE db_dbnode SET attributes = attributes #- '{{array|symbols}}' WHERE id = {pk}""") + ) + utils.delete_numpy_array_from_repository(repo_path, uuid, 'symbols') def downgrade(): """Migrations for the downgrade.""" - # yapf:disable - connection = op.get_bind() - - DbNode = table('db_dbnode', column('id', Integer), column('uuid', UUID), column('type', String), - column('attributes', JSONB)) - - nodes = connection.execute( - select(DbNode.c.id, DbNode.c.uuid).where( - DbNode.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.'))).fetchall() - - for pk, uuid in nodes: - attributes = connection.execute(select(DbNode.c.attributes).where(DbNode.c.id == pk)).fetchone() - symbols = numpy.array(attributes['symbols']) - utils.store_numpy_array_in_repository(uuid, 'symbols', symbols) - key = op.inline_literal('{"array|symbols"}') - connection.execute(DbNode.update().where(DbNode.c.id == pk).values( - attributes=func.jsonb_set(DbNode.c.attributes, key, cast(list(symbols.shape), JSONB)))) + raise NotImplementedError('Downgrade of ce56d84bcc35.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/d254fdfed416_rename_parameter_data_to_dict.py b/aiida/backends/sqlalchemy/migrations/versions/d254fdfed416_rename_parameter_data_to_dict.py index 87a1aa8fc0..424e424718 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/d254fdfed416_rename_parameter_data_to_dict.py +++ b/aiida/backends/sqlalchemy/migrations/versions/d254fdfed416_rename_parameter_data_to_dict.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Data migration for after `ParameterData` was renamed to `Dict`. +"""Rename `db_dbnode.type` values `data.parameter.ParameterData.` to `data.dict.Dict.` + +Note this is identical to migration django_0028 Revision ID: d254fdfed416 Revises: 61fc0913fae9 @@ -39,11 +41,9 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - conn = op.get_bind() - statement = text( r""" UPDATE db_dbnode SET type = 'data.parameter.ParameterData.' 
WHERE type = 'data.dict.Dict.'; """ ) - conn.execute(statement) + op.get_bind().execute(statement) diff --git a/aiida/backends/sqlalchemy/migrations/versions/de2eaf6978b4_simplify_user_model.py b/aiida/backends/sqlalchemy/migrations/versions/de2eaf6978b4_simplify_user_model.py index a154d0f019..d4470057ce 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/de2eaf6978b4_simplify_user_model.py +++ b/aiida/backends/sqlalchemy/migrations/versions/de2eaf6978b4_simplify_user_model.py @@ -8,19 +8,18 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member,import-error,no-name-in-module -"""Drop various columns from the `DbUser` model. +"""Simplify `db_dbuser`, by dropping unnecessary columns These columns were part of the default Django user model +This migration is similar to django_0035 + Revision ID: de2eaf6978b4 Revises: 1830c8430131 Create Date: 2019-05-28 11:15:33.242602 """ - from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = 'de2eaf6978b4' @@ -41,13 +40,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - op.add_column( - 'db_dbuser', sa.Column('date_joined', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True) - ) - op.add_column('db_dbuser', sa.Column('password', sa.VARCHAR(length=128), autoincrement=False, nullable=True)) - op.add_column( - 'db_dbuser', sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True) - ) - op.add_column('db_dbuser', sa.Column('is_staff', sa.BOOLEAN(), autoincrement=False, nullable=True)) - op.add_column('db_dbuser', sa.Column('is_superuser', sa.BOOLEAN(), autoincrement=False, nullable=True)) - op.add_column('db_dbuser', sa.Column('is_active', sa.BOOLEAN(), autoincrement=False, nullable=True)) + raise NotImplementedError('Downgrade of de2eaf6978b4.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0001_initial.py b/aiida/backends/sqlalchemy/migrations/versions/django_0001_initial.py new file mode 100644 index 0000000000..6c8db70fb2 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0001_initial.py @@ -0,0 +1,737 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Initial django schema + +Revision ID: django_0001 +Revises: +Create Date: 2017-06-28 17:12:23.327195 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'django_0001' +down_revision = None +branch_labels = ('django',) +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + + # dummy django tables + op.create_table( + 'auth_group', + sa.Column('id', sa.INTEGER(), nullable=False, primary_key=True), + ) + op.create_table( + 'auth_group_permissions', + sa.Column('id', sa.INTEGER(), nullable=False, primary_key=True), + ) + op.create_table( + 'auth_permission', + sa.Column('id', sa.INTEGER(), nullable=False, primary_key=True), + ) + op.create_table( + 'django_content_type', + sa.Column('id', sa.INTEGER(), nullable=False, primary_key=True), + ) + op.create_table( + 'django_migrations', + sa.Column('id', sa.INTEGER(), nullable=False, primary_key=True), + ) + + op.create_table( + 'db_dbuser', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_pkey'), + sa.Column('email', sa.VARCHAR(length=75), nullable=False), + sa.Column('password', sa.VARCHAR(length=128), nullable=False), + sa.Column('is_superuser', sa.BOOLEAN(), nullable=False), + sa.Column('first_name', sa.VARCHAR(length=254), nullable=False), + sa.Column('last_name', sa.VARCHAR(length=254), nullable=False), + sa.Column('institution', sa.VARCHAR(length=254), nullable=False), + sa.Column('is_staff', sa.BOOLEAN(), nullable=False), + sa.Column('is_active', sa.BOOLEAN(), nullable=False), + sa.Column('last_login', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('date_joined', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.UniqueConstraint('email', name='db_dbuser_email_key'), + sa.Index( + 'db_dbuser_email_30150b7e_like', + 'email', + postgresql_using='btree', + postgresql_ops={'email': 'varchar_pattern_ops'}, + ), + ) + + op.create_table( + 'db_dbcomputer', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcomputer_pkey'), + sa.Column('uuid', sa.VARCHAR(length=36), nullable=False), + sa.Column('name', sa.VARCHAR(length=255), nullable=False), + sa.Column('hostname', sa.VARCHAR(length=255), nullable=False), + sa.Column('description', sa.TEXT(), nullable=False), + sa.Column('enabled', sa.BOOLEAN(), nullable=False), + sa.Column('transport_type', sa.VARCHAR(length=255), nullable=False), + sa.Column('scheduler_type', sa.VARCHAR(length=255), nullable=False), + sa.Column('transport_params', sa.TEXT(), nullable=False), + sa.Column('metadata', sa.TEXT(), nullable=False), + sa.UniqueConstraint('name', name='db_dbcomputer_name_key'), + sa.Index( + 'db_dbcomputer_name_f1800b1a_like', + 'name', + postgresql_using='btree', + postgresql_ops={'name': 'varchar_pattern_ops'}, + ), + ) + + op.create_table( + 'db_dbgroup', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbgroup_pkey'), + sa.Column('uuid', sa.VARCHAR(length=36), nullable=False), + sa.Column('name', sa.VARCHAR(length=255), nullable=False), + sa.Column('type', sa.VARCHAR(length=255), nullable=False), + sa.Column('time', 
postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('description', sa.TEXT(), nullable=False), + sa.Column('user_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint('name', 'type', name='db_dbgroup_name_type_12656f33_uniq'), + sa.Index('db_dbgroup_name_66c75272', 'name'), + sa.Index('db_dbgroup_type_23b2a748', 'type'), + sa.Index('db_dbgroup_user_id_100f8a51', 'user_id'), + sa.Index( + 'db_dbgroup_name_66c75272_like', + 'name', + postgresql_using='btree', + postgresql_ops={'name': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dbgroup_type_23b2a748_like', + 'type', + postgresql_using='btree', + postgresql_ops={'type': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbgroup_user_id_100f8a51_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dblock', + sa.Column('key', sa.VARCHAR(length=255), nullable=False), + sa.PrimaryKeyConstraint('key', name='db_dblock_pkey'), + sa.Column('creation', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('timeout', sa.INTEGER(), nullable=False), + sa.Column('owner', sa.VARCHAR(length=255), nullable=False), + sa.Index( + 'db_dblock_key_048c6767_like', + 'key', + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ), + ) + + op.create_table( + 'db_dblog', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dblog_pkey'), + sa.Column('time', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('loggername', sa.VARCHAR(length=255), nullable=False), + sa.Column('levelname', sa.VARCHAR(length=50), nullable=False), + sa.Column('objname', sa.VARCHAR(length=255), nullable=False), + sa.Column('objpk', sa.INTEGER(), nullable=True), + sa.Column('message', sa.TEXT(), nullable=False), + sa.Column('metadata', sa.TEXT(), nullable=False), + sa.Index('db_dblog_levelname_ad5dc346', 'levelname'), + sa.Index('db_dblog_loggername_00b5ba16', 'loggername'), + sa.Index('db_dblog_objname_69932b1e', 'objname'), + sa.Index('db_dblog_objpk_fc47afa9', 'objpk'), + sa.Index( + 'db_dblog_levelname_ad5dc346_like', + 'levelname', + postgresql_using='btree', + postgresql_ops={'levelname': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dblog_loggername_00b5ba16_like', + 'loggername', + postgresql_using='btree', + postgresql_ops={'loggername': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dblog_objname_69932b1e_like', + 'objname', + postgresql_using='btree', + postgresql_ops={'objname': 'varchar_pattern_ops'}, + ), + ) + + op.create_table( + 'db_dbnode', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbnode_pkey'), + sa.Column('uuid', sa.VARCHAR(length=36), nullable=False), + sa.Column('type', sa.VARCHAR(length=255), nullable=False), + sa.Column('label', sa.VARCHAR(length=255), nullable=False), + sa.Column('description', sa.TEXT(), nullable=False), + sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('nodeversion', sa.INTEGER(), nullable=False), + sa.Column('public', sa.BOOLEAN(), nullable=False), + sa.Column('dbcomputer_id', sa.INTEGER(), nullable=True), + sa.Column('user_id', sa.INTEGER(), nullable=False), + sa.Index('db_dbnode_dbcomputer_id_315372a3', 'dbcomputer_id'), + sa.Index('db_dbnode_label_6469539e', 'label'), + sa.Index('db_dbnode_type_a8ce9753', 'type'), + sa.Index('db_dbnode_user_id_12e7aeaf', 'user_id'), + sa.Index( + 
'db_dbnode_label_6469539e_like', + 'label', + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dbnode_type_a8ce9753_like', + 'type', + postgresql_using='btree', + postgresql_ops={'type': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + name='db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbattribute', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbattribute_pkey'), + sa.Column('datatype', sa.VARCHAR(length=10), nullable=False), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.Column('key', sa.VARCHAR(length=1024), nullable=False), + sa.Column('bval', sa.BOOLEAN(), nullable=True), + sa.Column('ival', sa.INTEGER(), nullable=True), + sa.Column('fval', sa.FLOAT(), nullable=True), + sa.Column('tval', sa.TEXT(), nullable=False), + sa.Column('dval', postgresql.TIMESTAMP(timezone=True), nullable=True), + sa.UniqueConstraint('dbnode_id', 'key', name='db_dbattribute_dbnode_id_key_c589e447_uniq'), + sa.Index('db_dbattribute_datatype_91c4dc04', 'datatype'), + sa.Index('db_dbattribute_dbnode_id_253bf153', 'dbnode_id'), + sa.Index('db_dbattribute_key_ac2bc4e4', 'key'), + sa.Index( + 'db_dbattribute_datatype_91c4dc04_like', + 'datatype', + postgresql_using='btree', + postgresql_ops={'datatype': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dbattribute_key_ac2bc4e4_like', + 'key', + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + ) + + op.create_table( + 'db_dbextra', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbextra_pkey'), + sa.Column('datatype', sa.VARCHAR(length=10), nullable=False), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.Column('key', sa.VARCHAR(length=1024), nullable=False), + sa.Column('bval', sa.BOOLEAN(), nullable=True), + sa.Column('ival', sa.INTEGER(), nullable=True), + sa.Column('fval', sa.FLOAT(), nullable=True), + sa.Column('tval', sa.TEXT(), nullable=False), + sa.Column('dval', postgresql.TIMESTAMP(timezone=True), nullable=True), + sa.UniqueConstraint('dbnode_id', 'key', name='db_dbextra_dbnode_id_key_aa56fd37_uniq'), + sa.Index('db_dbextra_datatype_2eba38c6', 'datatype'), + sa.Index('db_dbextra_dbnode_id_c7fe8961', 'dbnode_id'), + sa.Index('db_dbextra_key_b1a8abc6', 'key'), + sa.Index( + 'db_dbextra_datatype_2eba38c6_like', + 'datatype', + postgresql_using='btree', + postgresql_ops={'datatype': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dbextra_key_b1a8abc6_like', + 'key', + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + ) + + op.create_table( + 'db_dblink', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dblink_pkey'), + sa.Column('input_id', sa.INTEGER(), nullable=False), + sa.Column('output_id', sa.INTEGER(), nullable=False), + sa.Column('label', 
sa.VARCHAR(length=255), nullable=False), + sa.UniqueConstraint('input_id', 'output_id', name='db_dblink_input_id_output_id_fbe99cb5_uniq'), + sa.UniqueConstraint('output_id', 'label', name='db_dblink_output_id_label_00bdb9c7_uniq'), + sa.Index('db_dblink_input_id_9245bd73', 'input_id'), + sa.Index('db_dblink_label_f1343cfb', 'label'), + sa.Index('db_dblink_output_id_c0167528', 'output_id'), + sa.Index( + 'db_dblink_label_f1343cfb_like', + 'label', + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['input_id'], + ['db_dbnode.id'], + name='db_dblink_input_id_9245bd73_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['output_id'], + ['db_dbnode.id'], + name='db_dblink_output_id_c0167528_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbgroup_dbnodes', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbgroup_dbnodes_pkey'), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.Column('dbgroup_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint('dbgroup_id', 'dbnode_id', name='db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq'), + sa.Index('db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d', 'dbgroup_id'), + sa.Index('db_dbgroup_dbnodes_dbnode_id_118b9439', 'dbnode_id'), + sa.ForeignKeyConstraint( + ['dbgroup_id'], + ['db_dbgroup.id'], + name='db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbcalcstate', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcalcstate_pkey'), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.Column('state', sa.VARCHAR(length=25), nullable=False), + sa.Column('time', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.UniqueConstraint('dbnode_id', 'state', name='db_dbcalcstate_dbnode_id_state_b4a14db3_uniq'), + sa.Index('db_dbcalcstate_dbnode_id_f217a84c', 'dbnode_id'), + sa.Index('db_dbcalcstate_state_0bf54584', 'state'), + sa.Index( + 'db_dbcalcstate_state_0bf54584_like', + 'state', + postgresql_using='btree', + postgresql_ops={'state': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + ) + + op.create_table( + 'db_dbcomment', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcomment_pkey'), + sa.Column('uuid', sa.VARCHAR(length=36), nullable=False), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('user_id', sa.INTEGER(), nullable=False), + sa.Column('content', sa.TEXT(), nullable=False), + sa.Index('db_dbcomment_dbnode_id_3b812b6b', 'dbnode_id'), + sa.Index('db_dbcomment_user_id_8ed5e360', 'user_id'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id', + 
deferrable=True, + initially='DEFERRED', + ), + ) + + op.create_table( + 'db_dbpath', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbpath_pkey'), + sa.Column('parent_id', sa.INTEGER(), nullable=False), + sa.Column('child_id', sa.INTEGER(), nullable=False), + sa.Column('depth', sa.INTEGER(), nullable=False), + sa.Column('entry_edge_id', sa.INTEGER(), nullable=True), + sa.Column('direct_edge_id', sa.INTEGER(), nullable=True), + sa.Column('exit_edge_id', sa.INTEGER(), nullable=True), + sa.Index('db_dbpath_child_id_d8228636', 'child_id'), + sa.Index('db_dbpath_parent_id_3b82d6c8', 'parent_id'), + sa.ForeignKeyConstraint( + ['child_id'], + ['db_dbnode.id'], + name='db_dbpath_child_id_d8228636_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbnode.id'], + name='db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbsetting', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbsetting_pkey'), + sa.Column('key', sa.VARCHAR(length=1024), nullable=False), + sa.Column('description', sa.TEXT(), nullable=False), + sa.Column('time', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('datatype', sa.VARCHAR(length=10), nullable=False), + sa.Column('bval', sa.BOOLEAN(), nullable=True), + sa.Column('ival', sa.INTEGER(), nullable=True), + sa.Column('fval', sa.FLOAT(), nullable=True), + sa.Column('tval', sa.TEXT(), nullable=False), + sa.Column('dval', postgresql.TIMESTAMP(timezone=True), nullable=True), + sa.UniqueConstraint('key', name='db_dbsetting_key_1b84beb4_uniq'), + sa.Index('db_dbsetting_datatype_49f4397c', 'datatype'), + sa.Index('db_dbsetting_key_1b84beb4', 'key'), + sa.Index( + 'db_dbsetting_datatype_49f4397c_like', + 'datatype', + postgresql_using='btree', + postgresql_ops={'datatype': 'varchar_pattern_ops'}, + ), + sa.Index( + 'db_dbsetting_key_1b84beb4_like', + 'key', + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ), + ) + + op.create_table( + 'db_dbuser_groups', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_groups_pkey'), + sa.Column('dbuser_id', sa.INTEGER(), nullable=False), + sa.Column('group_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint('dbuser_id', 'group_id', name='db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq'), + sa.Index('db_dbuser_groups_dbuser_id_480b3520', 'dbuser_id'), + sa.Index('db_dbuser_groups_group_id_8478d87e', 'group_id'), + sa.ForeignKeyConstraint( + ['dbuser_id'], + ['db_dbuser.id'], + name='db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['group_id'], + ['auth_group.id'], + name='db_dbuser_groups_group_id_8478d87e_fk_auth_group_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbuser_user_permissions', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_user_permissions_pkey'), + sa.Column('dbuser_id', sa.INTEGER(), nullable=False), + sa.Column('permission_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint( + 'dbuser_id', 'permission_id', name='db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq' + ), + sa.Index('db_dbuser_user_permissions_dbuser_id_364456ee', 'dbuser_id'), + sa.Index('db_dbuser_user_permissions_permission_id_c5aafc54', 'permission_id'), + 
sa.ForeignKeyConstraint( + ['dbuser_id'], + ['db_dbuser.id'], + name='db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['permission_id'], + ['auth_permission.id'], + name='db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbworkflow', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflow_pkey'), + sa.Column('uuid', sa.VARCHAR(length=36), nullable=False), + sa.Column('ctime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('mtime', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('user_id', sa.INTEGER(), nullable=False), + sa.Column('label', sa.VARCHAR(length=255), nullable=False), + sa.Column('description', sa.TEXT(), nullable=False), + sa.Column('nodeversion', sa.INTEGER(), nullable=False), + sa.Column('lastsyncedversion', sa.INTEGER(), nullable=False), + sa.Column('state', sa.VARCHAR(length=255), nullable=False), + sa.Column('report', sa.TEXT(), nullable=False), + sa.Column('module', sa.TEXT(), nullable=False), + sa.Column('module_class', sa.TEXT(), nullable=False), + sa.Column('script_path', sa.TEXT(), nullable=False), + sa.Column('script_md5', sa.VARCHAR(length=255), nullable=False), + sa.Index('db_dbworkflow_label_7368f34a', 'label'), + sa.Index('db_dbworkflow_user_id_ef1f3251', 'user_id'), + sa.Index( + 'db_dbworkflow_label_7368f34a_like', + 'label', + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbworkflowstep', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_pkey'), + sa.Column('parent_id', sa.INTEGER(), nullable=False), + sa.Column('user_id', sa.INTEGER(), nullable=False), + sa.Column('name', sa.VARCHAR(length=255), nullable=False), + sa.Column('time', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('nextcall', sa.VARCHAR(length=255), nullable=False), + sa.Column('state', sa.VARCHAR(length=255), nullable=False), + sa.UniqueConstraint('parent_id', 'name', name='db_dbworkflowstep_parent_id_name_111027e3_uniq'), + sa.Index('db_dbworkflowstep_parent_id_ffb754d9', 'parent_id'), + sa.Index('db_dbworkflowstep_user_id_04282431', 'user_id'), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbworkflow.id'], + name='db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbworkflowdata', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowdata_pkey'), + sa.Column('parent_id', sa.INTEGER(), nullable=False), + sa.Column('name', sa.VARCHAR(length=255), nullable=False), + sa.Column('time', postgresql.TIMESTAMP(timezone=True), nullable=False), + sa.Column('data_type', sa.VARCHAR(length=255), nullable=False), + sa.Column('value_type', sa.VARCHAR(length=255), nullable=False), + sa.Column('json_value', sa.TEXT(), nullable=False), + sa.Column('aiida_obj_id', sa.INTEGER(), nullable=True), + sa.UniqueConstraint( + 'parent_id', 'name', 
'data_type', name='db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq' + ), + sa.Index('db_dbworkflowdata_aiida_obj_id_70a2d33b', 'aiida_obj_id'), + sa.Index('db_dbworkflowdata_parent_id_ff4dbf8d', 'parent_id'), + sa.ForeignKeyConstraint( + ['aiida_obj_id'], + ['db_dbnode.id'], + name='db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbworkflow.id'], + name='db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbworkflowstep_calculations', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_calculations_pkey'), + sa.Column('dbworkflowstep_id', sa.INTEGER(), nullable=False), + sa.Column('dbnode_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint( + 'dbworkflowstep_id', 'dbnode_id', name='db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq' + ), + sa.Index('db_dbworkflowstep_calculations_dbnode_id_0d07b7a7', 'dbnode_id'), + sa.Index('db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637', 'dbworkflowstep_id'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbworkflowstep_id'], + ['db_dbworkflowstep.id'], + name='db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbworkflowstep_sub_workflows', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_sub_workflows_pkey'), + sa.Column('dbworkflowstep_id', sa.INTEGER(), nullable=False), + sa.Column('dbworkflow_id', sa.INTEGER(), nullable=False), + sa.UniqueConstraint( + 'dbworkflowstep_id', + 'dbworkflow_id', + name='db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq', + ), + sa.Index('db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103', 'dbworkflow_id'), + sa.Index('db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7', 'dbworkflowstep_id'), + sa.ForeignKeyConstraint( + ['dbworkflow_id'], + ['db_dbworkflow.id'], + name='db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbworkflowstep_id'], + ['db_dbworkflowstep.id'], + name='db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbauthinfo', + sa.Column('id', sa.INTEGER(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbauthinfo_pkey'), + sa.Column('aiidauser_id', sa.INTEGER(), nullable=False), + sa.Column('dbcomputer_id', sa.INTEGER(), nullable=False), + sa.Column('metadata', sa.TEXT(), nullable=False), + sa.Column('auth_params', sa.TEXT(), nullable=False), + sa.Column('enabled', sa.BOOLEAN(), nullable=False), + sa.UniqueConstraint( + 'aiidauser_id', 'dbcomputer_id', name='db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq' + ), + sa.Index('db_dbauthinfo_aiidauser_id_0684fdfb', 'aiidauser_id'), + sa.Index('db_dbauthinfo_dbcomputer_id_424f7ac4', 'dbcomputer_id'), + sa.ForeignKeyConstraint( + ['aiidauser_id'], + ['db_dbuser.id'], + name='db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + 
name='db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id', + initially='DEFERRED', + deferrable=True, + ), + ) + + +def downgrade(): + """Migrations for the downgrade.""" + op.drop_table('db_dbauthinfo') + op.drop_table('db_dbworkflowstep_calculations') + op.drop_table('db_dbworkflowstep_sub_workflows') + op.drop_table('db_dbworkflowdata') + op.drop_table('db_dbworkflowstep') + op.drop_table('db_dbworkflow') + op.drop_table('db_dbuser_user_permissions') + op.drop_table('db_dbuser_groups') + op.drop_table('db_dbgroup_dbnodes') + op.drop_table('db_dbgroup') + op.drop_table('db_dblink') + op.drop_table('db_dbpath') + op.drop_table('db_dbcalcstate') + op.drop_table('db_dbcomment') + op.drop_table('db_dbattribute') + op.drop_table('db_dbextra') + op.drop_table('db_dbnode') + op.drop_table('db_dbcomputer') + op.drop_table('db_dblog') + op.drop_table('db_dbsetting') + op.drop_table('db_dblock') + op.drop_table('db_dbuser') + + op.drop_table('auth_group_permissions') + op.drop_table('auth_permission') + op.drop_table('auth_group') + op.drop_table('django_content_type') + op.drop_table('django_migrations') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0002_db_state_change.py b/aiida/backends/sqlalchemy/migrations/versions/django_0002_db_state_change.py new file mode 100644 index 0000000000..928d0cc4e7 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0002_db_state_change.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Fix calculation states. + +`UNDETERMINED` and `NOTFOUND` `db_dbcalcstate.state` values are replaced by `FAILED`. + +Revision ID: django_0002 +Revises: django_0001 + +""" +from alembic import op + +revision = 'django_0002' +down_revision = 'django_0001' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # Note in the original django migration, a warning log was actually added to the node, + # but we forgo that here + op.execute(""" + UPDATE db_dbcalcstate + SET state = 'FAILED' + WHERE state = 'NOTFOUND' + """) + op.execute( + """ + UPDATE db_dbcalcstate + SET state = 'FAILED' + WHERE state = 'UNDETERMINED' + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0002.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0003_add_link_type.py b/aiida/backends/sqlalchemy/migrations/versions/django_0003_add_link_type.py new file mode 100644 index 0000000000..a659e4f308 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0003_add_link_type.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Add `db_dblink.type` field, and remove link field uniqueness constraints + +Revision ID: django_0003 +Revises: django_0002 + +""" +from alembic import op +import sqlalchemy as sa + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations + +revision = 'django_0003' +down_revision = 'django_0002' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.add_column('db_dblink', sa.Column('type', sa.VARCHAR(length=255), nullable=False, server_default='')) + op.alter_column('db_dblink', 'type', server_default=None) + op.create_index('db_dblink_type_229f212b', 'db_dblink', ['type']) + op.create_index( + 'db_dblink_type_229f212b_like', + 'db_dblink', + ['type'], + postgresql_using='btree', + postgresql_ops={'type': 'varchar_pattern_ops'}, + ) + reflect = ReflectMigrations(op) + reflect.drop_unique_constraints('db_dblink', ['input_id', 'output_id']) + reflect.drop_unique_constraints('db_dblink', ['output_id', 'label']) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0003.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0004_add_daemon_and_uuid_indices.py b/aiida/backends/sqlalchemy/migrations/versions/django_0004_add_daemon_and_uuid_indices.py new file mode 100644 index 0000000000..94ab927b17 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0004_add_daemon_and_uuid_indices.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Add indices to `db_dbattribute.tval` and `db_dbnode.uuid` + +Revision ID: django_0004 +Revises: django_0003 + +""" +from alembic import op + +revision = 'django_0004' +down_revision = 'django_0003' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute( + """ + CREATE INDEX tval_idx_for_daemon + ON db_dbattribute (tval) + WHERE ("db_dbattribute"."tval" + IN ('COMPUTED', 'WITHSCHEDULER', 'TOSUBMIT'))""" + ) + op.create_index('db_dbnode_uuid_62e0bf98', 'db_dbnode', ['uuid']) + op.create_index( + 'db_dbnode_uuid_62e0bf98_like', + 'db_dbnode', + ['uuid'], + postgresql_using='btree', + postgresql_ops={'uuid': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0004.') diff --git a/aiida/backends/djsite/db/migrations/0010_process_type.py b/aiida/backends/sqlalchemy/migrations/versions/django_0005_add_cmtime_indices.py similarity index 51% rename from aiida/backends/djsite/db/migrations/0010_process_type.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0005_add_cmtime_indices.py index 07bbc5d6f9..13eef22067 100644 --- a/aiida/backends/djsite/db/migrations/0010_process_type.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0005_add_cmtime_indices.py @@ -7,26 +7,27 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations, models +# pylint: disable=invalid-name,no-member +"""Add indexes on `db_dbnode.ctime` and `db_dbnode.mtime` -from aiida.backends.djsite.db.migrations import upgrade_schema_version +Revision ID: django_0005 +Revises: django_0004 -REVISION = '1.0.10' -DOWN_REVISION = '1.0.9' +""" +from alembic import op +revision = 'django_0005' +down_revision = 'django_0004' +branch_labels = None +depends_on = None -class Migration(migrations.Migration): - """Database migration.""" - dependencies = [ - ('db', '0009_base_data_plugin_type_string'), - ] +def upgrade(): + """Migrations for the upgrade.""" + op.create_index('db_dbnode_ctime_71626ef5', 'db_dbnode', ['ctime'], unique=False) + op.create_index('db_dbnode_mtime_0554ea3d', 'db_dbnode', ['mtime'], unique=False) - operations = [ - migrations.AddField( - model_name='dbnode', name='process_type', field=models.CharField(max_length=255, db_index=True, null=True) - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0005.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0006_delete_dbpath.py b/aiida/backends/sqlalchemy/migrations/versions/django_0006_delete_dbpath.py new file mode 100644 index 0000000000..718ac3fb49 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0006_delete_dbpath.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved.
# +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Drop `db_dbpath` table + +Revision ID: django_0006 +Revises: django_0005 + +""" +from alembic import op + +revision = 'django_0006' +down_revision = 'django_0005' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.drop_table('db_dbpath') + + # Note this was also an undocumented part of the migration + op.execute( + """ + DROP TRIGGER IF EXISTS autoupdate_tc ON db_dblink; + DROP FUNCTION IF EXISTS update_tc(); + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0006.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0007_update_linktypes.py b/aiida/backends/sqlalchemy/migrations/versions/django_0007_update_linktypes.py new file mode 100644 index 0000000000..ec9532990a --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0007_update_linktypes.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Update `db_dblink.type` values + +Revision ID: django_0007 +Revises: django_0006 + +""" +from alembic import op + +revision = 'django_0007' +down_revision = 'django_0006' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # I am first migrating the wrongly declared returnlinks out of + # the InlineCalculations. + # This bug is reported #628 https://github.com/aiidateam/aiida-core/issues/628 + # There is an explicit check in the code of the inline calculation + # ensuring that the calculation returns UNSTORED nodes. + # Therefore, no cycle can be created with that migration! + # + # this command: + # 1) selects all links that + # - joins an InlineCalculation (or subclass) as input + # - joins a Data (or subclass) as output + # - is marked as a returnlink. + # 2) set for these links the type to 'createlink' + op.execute( + """ + UPDATE db_dblink set type='createlink' WHERE db_dblink.id IN ( + SELECT db_dblink_1.id + FROM db_dbnode AS db_dbnode_1 + JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id + JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id + WHERE db_dbnode_1.type LIKE 'calculation.inline.%' + AND db_dbnode_2.type LIKE 'data.%' + AND db_dblink_1.type = 'returnlink' + ); + """ + ) + # Now I am updating the link-types that are null because of either an export and subsequent import + # https://github.com/aiidateam/aiida-core/issues/685 + # or because the link types don't exist because the links were added before the introduction of link types. 
+ # This is reported here: https://github.com/aiidateam/aiida-core/issues/687 + # + # The following sql statement: + # 1) selects all links that + # - joins Data (or subclass) or Code as input + # - joins Calculation (or subclass) as output: includes WorkCalculation, InlineCalculation, JobCalculations... + # - has no type (null) + # 2) set for these links the type to 'inputlink' + op.execute( + """ + UPDATE db_dblink set type='inputlink' where id in ( + SELECT db_dblink_1.id + FROM db_dbnode AS db_dbnode_1 + JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id + JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id + WHERE ( db_dbnode_1.type LIKE 'data.%' or db_dbnode_1.type = 'code.Code.' ) + AND db_dbnode_2.type LIKE 'calculation.%' + AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '') + ); + """ + ) + # + # The following sql statement: + # 1) selects all links that + # - join JobCalculation (or subclass) or InlineCalculation as input + # - joins Data (or subclass) as output. + # - has no type (null) + # 2) set for these links the type to 'createlink' + op.execute( + """ + UPDATE db_dblink set type='createlink' where id in ( + SELECT db_dblink_1.id + FROM db_dbnode AS db_dbnode_1 + JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id + JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id + WHERE db_dbnode_2.type LIKE 'data.%' + AND ( + db_dbnode_1.type LIKE 'calculation.job.%' + OR + db_dbnode_1.type = 'calculation.inline.InlineCalculation.' + ) + AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '') + ); + """ + ) + # The following sql statement: + # 1) selects all links that + # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked + # - join Data (or subclass) as output. + # - has no type (null) + # 2) set for these links the type to 'returnlink' + op.execute( + """ + UPDATE db_dblink set type='returnlink' where id in ( + SELECT db_dblink_1.id + FROM db_dbnode AS db_dbnode_1 + JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id + JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id + WHERE db_dbnode_2.type LIKE 'data.%' + AND db_dbnode_1.type = 'calculation.work.WorkCalculation.' + AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '') + ); + """ + ) + # Now I update links that are CALLS: + # The following sql statement: + # 1) selects all links that + # - join WorkCalculation as input. No subclassing was introduced so far, so only one type string is checked + # - join Calculation (or subclass) as output. Includes JobCalculation and WorkCalculations and all subclasses. + # - has no type (null) + # 2) set for these links the type to 'calllink' + op.execute( + """ + UPDATE db_dblink set type='calllink' where id in ( + SELECT db_dblink_1.id + FROM db_dbnode AS db_dbnode_1 + JOIN db_dblink AS db_dblink_1 ON db_dblink_1.input_id = db_dbnode_1.id + JOIN db_dbnode AS db_dbnode_2 ON db_dblink_1.output_id = db_dbnode_2.id + WHERE db_dbnode_1.type = 'calculation.work.WorkCalculation.'
+ AND db_dbnode_2.type LIKE 'calculation.%' + AND ( db_dblink_1.type IS NULL OR db_dblink_1.type = '') + ); + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0007.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0008_code_hidden_to_extra.py b/aiida/backends/sqlalchemy/migrations/versions/django_0008_code_hidden_to_extra.py new file mode 100644 index 0000000000..f854ee393a --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0008_code_hidden_to_extra.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Move `Code` `hidden` attribute from `db_dbattribute` to `db_dbextra`. + +Revision ID: django_0008 +Revises: django_0007 + +""" +from alembic import op + +revision = 'django_0008' +down_revision = 'django_0007' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # The 'hidden' property of AbstractCode has been changed from an attribute to an extra + # Therefore we find all nodes of type Code and if they have an attribute with the key 'hidden' + # we move that value to the extra table + # + # First we copy the 'hidden' attributes from code.Code. nodes to the db_dbextra table + op.execute( + """ + INSERT INTO db_dbextra (key, datatype, tval, fval, ival, bval, dval, dbnode_id) ( + SELECT db_dbattribute.key, db_dbattribute.datatype, db_dbattribute.tval, db_dbattribute.fval, + db_dbattribute.ival, db_dbattribute.bval, db_dbattribute.dval, db_dbattribute.dbnode_id + FROM db_dbattribute JOIN db_dbnode ON db_dbnode.id = db_dbattribute.dbnode_id + WHERE db_dbattribute.key = 'hidden' + AND db_dbnode.type = 'code.Code.' + ); + """ + ) + # Secondly, we delete the original entries from the DbAttribute table + op.execute( + """ + DELETE FROM db_dbattribute + WHERE id in ( + SELECT db_dbattribute.id + FROM db_dbattribute + JOIN db_dbnode ON db_dbnode.id = db_dbattribute.dbnode_id + WHERE db_dbattribute.key = 'hidden' AND db_dbnode.type = 'code.Code.' + ); + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0008.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0009_base_data_plugin_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/django_0009_base_data_plugin_type_string.py new file mode 100644 index 0000000000..790cfd31a2 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0009_base_data_plugin_type_string.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Change `db_dbnode.type` for base `Data` types. + +The base Data types Bool, Float, Int, Str and List have been moved in the source code, which means that their +module paths have changed; the module path determines the plugin type string that is stored in the database. +The type string will now have a prefix that is unique to each sub type. + +Revision ID: django_0009 +Revises: django_0008 + +""" +from alembic import op + +revision = 'django_0009' +down_revision = 'django_0008' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute( + """ + UPDATE db_dbnode SET type = 'data.bool.Bool.' WHERE type = 'data.base.Bool.'; + UPDATE db_dbnode SET type = 'data.float.Float.' WHERE type = 'data.base.Float.'; + UPDATE db_dbnode SET type = 'data.int.Int.' WHERE type = 'data.base.Int.'; + UPDATE db_dbnode SET type = 'data.str.Str.' WHERE type = 'data.base.Str.'; + UPDATE db_dbnode SET type = 'data.list.List.' WHERE type = 'data.base.List.'; + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + op.execute( + """ + UPDATE db_dbnode SET type = 'data.base.Bool.' WHERE type = 'data.bool.Bool.'; + UPDATE db_dbnode SET type = 'data.base.Float.' WHERE type = 'data.float.Float.'; + UPDATE db_dbnode SET type = 'data.base.Int.' WHERE type = 'data.int.Int.'; + UPDATE db_dbnode SET type = 'data.base.Str.' WHERE type = 'data.str.Str.'; + UPDATE db_dbnode SET type = 'data.base.List.' WHERE type = 'data.list.List.'; + """ + ) diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0010_process_type.py b/aiida/backends/sqlalchemy/migrations/versions/django_0010_process_type.py new file mode 100644 index 0000000000..145693aa09 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0010_process_type.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Add `db_dbnode.process_type` + +Revision ID: django_0010 +Revises: django_0009 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0010' +down_revision = 'django_0009' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.add_column('db_dbnode', sa.Column('process_type', sa.String(length=255), nullable=True)) + op.create_index('db_dbnode_process_type_df7298d0', 'db_dbnode', ['process_type']) + op.create_index( + 'db_dbnode_process_type_df7298d0_like', + 'db_dbnode', + ['process_type'], + postgresql_using='btree', + postgresql_ops={'process_type': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0010.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0011_delete_kombu_tables.py b/aiida/backends/sqlalchemy/migrations/versions/django_0011_delete_kombu_tables.py new file mode 100644 index 0000000000..6c79366873 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0011_delete_kombu_tables.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Remove kombu messaging tables + +Revision ID: django_0011 +Revises: django_0010 + +""" +from alembic import op + +revision = 'django_0011' +down_revision = 'django_0010' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute( + """ + DROP TABLE IF EXISTS kombu_message; + DROP TABLE IF EXISTS kombu_queue; + DELETE FROM db_dbsetting WHERE key = 'daemon|user'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|retriever'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|retriever'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|updater'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|updater'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_stop|submitter'; + DELETE FROM db_dbsetting WHERE key = 'daemon|task_start|submitter'; + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Deletion of the kombu tables is not reversible.') diff --git a/aiida/orm/implementation/sql/__init__.py b/aiida/backends/sqlalchemy/migrations/versions/django_0012_drop_dblock.py similarity index 59% rename from aiida/orm/implementation/sql/__init__.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0012_drop_dblock.py index 439cd9ba84..30760711a0 100644 --- a/aiida/orm/implementation/sql/__init__.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0012_drop_dblock.py @@ -7,21 +7,26 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit 
http://www.aiida.net # ########################################################################### -""" -This module is for subclasses of the generic backend entities that only apply to SQL backends +# pylint: disable=invalid-name,no-member +"""Drop `db_dblock` table + +Revision ID: django_0012 +Revises: django_0011 -All SQL backends with an ORM should subclass from the classes in this module """ +from alembic import op -# AUTO-GENERATED +revision = 'django_0012' +down_revision = 'django_0011' +branch_labels = None +depends_on = None -# yapf: disable -# pylint: disable=wildcard-import -from .backends import * +def upgrade(): + """Migrations for the upgrade.""" + op.drop_table('db_dblock') -__all__ = ( - 'SqlBackend', -) -# yapf: enable +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0012.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0013_django_1_8.py b/aiida/backends/sqlalchemy/migrations/versions/django_0013_django_1_8.py new file mode 100644 index 0000000000..76245e3e2f --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0013_django_1_8.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Update `db_dbuser.last_login` and `db_dbuser.email` + +Revision ID: django_0013 +Revises: django_0012 + +""" +from alembic import op +import sqlalchemy as sa + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations + +revision = 'django_0013' +down_revision = 'django_0012' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.alter_column( + 'db_dbuser', + 'last_login', + existing_type=sa.DATETIME(), + nullable=True, + ) + op.alter_column( + 'db_dbuser', + 'email', + existing_type=sa.VARCHAR(length=75), + type_=sa.VARCHAR(length=254), + ) + # Note, I imagine the following was actually a mistake; it is re-added in django_0018 + reflect = ReflectMigrations(op) + reflect.drop_unique_constraints('db_dbuser', ['email']) # db_dbuser_email_key + reflect.drop_indexes('db_dbuser', 'email', unique=False) # db_dbuser_email_30150b7e_like + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0013.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0014_add_node_uuid_unique_constraint.py b/aiida/backends/sqlalchemy/migrations/versions/django_0014_add_node_uuid_unique_constraint.py new file mode 100644 index 0000000000..5255845333 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0014_add_node_uuid_unique_constraint.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Add a uniqueness constraint on `db_dbnode.uuid`. + +Revision ID: django_0014 +Revises: django_0013 + +""" +from alembic import op + +revision = 'django_0014' +down_revision = 'django_0013' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + from aiida.backends.sqlalchemy.migrations.utils.duplicate_uuids import verify_uuid_uniqueness + verify_uuid_uniqueness('db_dbnode', op.get_bind()) + op.create_unique_constraint('db_dbnode_uuid_62e0bf98_uniq', 'db_dbnode', ['uuid']) + op.drop_index('db_dbnode_uuid_62e0bf98', table_name='db_dbnode') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0014.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0015_invalidating_node_hash.py b/aiida/backends/sqlalchemy/migrations/versions/django_0015_invalidating_node_hash.py new file mode 100644 index 0000000000..d00361f8fa --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0015_invalidating_node_hash.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Invalidating node hash. + +Revision ID: django_0015 +Revises: django_0014 + +""" +from alembic import op + +revision = 'django_0015' +down_revision = 'django_0014' +branch_labels = None +depends_on = None + +# Currently valid hash key +_HASH_EXTRA_KEY = '_aiida_hash' + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute(f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';") + + +def downgrade(): + """Migrations for the downgrade.""" + op.execute(f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';") diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0016_code_sub_class_of_data.py b/aiida/backends/sqlalchemy/migrations/versions/django_0016_code_sub_class_of_data.py new file mode 100644 index 0000000000..8a72d6f079 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0016_code_sub_class_of_data.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Change type of `code.Code.`. 
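+
+For illustration, the rename below amounts to the following SQLAlchemy Core sketch (not part
+of this revision; it assumes ``conn`` is an open connection to the profile database)::
+
+    import sqlalchemy as sa
+
+    node = sa.table('db_dbnode', sa.column('type'))
+    conn.execute(node.update().where(node.c.type == 'code.Code.').values(type='data.code.Code.'))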
+ +Revision ID: django_0016 +Revises: django_0015 + +""" +from alembic import op + +revision = 'django_0016' +down_revision = 'django_0015' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute("UPDATE db_dbnode SET type = 'data.code.Code.' WHERE type = 'code.Code.';") + + +def downgrade(): + """Migrations for the downgrade.""" + op.execute("UPDATE db_dbnode SET type = 'code.Code.' WHERE type = 'data.code.Code.';") diff --git a/aiida/manage/database/integrity/__init__.py b/aiida/backends/sqlalchemy/migrations/versions/django_0017_drop_dbcalcstate.py similarity index 58% rename from aiida/manage/database/integrity/__init__.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0017_drop_dbcalcstate.py index 01c64cc9db..3f9e79a43b 100644 --- a/aiida/manage/database/integrity/__init__.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0017_drop_dbcalcstate.py @@ -7,17 +7,26 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Methods to validate the database integrity and fix violations.""" +# pylint: disable=invalid-name,no-member +"""Drop `db_dbcalcstate` table -# AUTO-GENERATED +Revision ID: django_0017 +Revises: django_0016 -# yapf: disable -# pylint: disable=wildcard-import +""" +from alembic import op -from .utils import * +revision = 'django_0017' +down_revision = 'django_0016' +branch_labels = None +depends_on = None -__all__ = ( - 'write_database_integrity_violation', -) -# yapf: enable +def upgrade(): + """Migrations for the upgrade.""" + op.drop_table('db_dbcalcstate') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0017.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0018_django_1_11.py b/aiida/backends/sqlalchemy/migrations/versions/django_0018_django_1_11.py new file mode 100644 index 0000000000..d2701e54d5 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0018_django_1_11.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Change UUID type and add uniqueness constraints. 
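+
+Before each unique constraint is added, the table must be free of duplicate uuids; a minimal
+sketch of the kind of check ``verify_uuid_uniqueness`` performs (the real helper differs in
+detail and raises on duplicates)::
+
+    import sqlalchemy as sa
+
+    def find_duplicate_uuids(conn, table_name):
+        """Return (uuid, count) pairs for every uuid that occurs more than once."""
+        return conn.execute(
+            sa.text(f'SELECT uuid, COUNT(*) FROM {table_name} GROUP BY uuid HAVING COUNT(*) > 1')
+        ).all()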
+ +Revision ID: django_0018 +Revises: django_0017 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations +from aiida.backends.sqlalchemy.migrations.utils.duplicate_uuids import verify_uuid_uniqueness + +revision = 'django_0018' +down_revision = 'django_0017' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + reflect = ReflectMigrations(op) + + reflect.drop_indexes('db_dbnode', 'uuid') # db_dbnode_uuid_62e0bf98_like + for table, unique in ( + ('db_dbcomment', 'db_dbcomment_uuid_49bac08c_uniq'), + ('db_dbcomputer', 'db_dbcomputer_uuid_f35defa6_uniq'), + ('db_dbgroup', 'db_dbgroup_uuid_af896177_uniq'), + ('db_dbnode', None), + ('db_dbworkflow', 'db_dbworkflow_uuid_08947ee2_uniq'), + ): + op.alter_column( + table, + 'uuid', + existing_type=sa.VARCHAR(length=36), + type_=postgresql.UUID(as_uuid=True), + nullable=False, + postgresql_using='uuid::uuid' + ) + if unique: + verify_uuid_uniqueness(table, op.get_bind()) + op.create_unique_constraint(unique, table, ['uuid']) + + op.create_unique_constraint('db_dbuser_email_30150b7e_uniq', 'db_dbuser', ['email']) + op.create_index( + 'db_dbuser_email_30150b7e_like', + 'db_dbuser', + ['email'], + postgresql_using='btree', + postgresql_ops={'email': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0018.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0019_migrate_builtin_calculations.py b/aiida/backends/sqlalchemy/migrations/versions/django_0019_migrate_builtin_calculations.py new file mode 100644 index 0000000000..615ea327bb --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0019_migrate_builtin_calculations.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Change the built-in calculation entry points. + +The built-in calculation plugins `arithmetic.add` and `templatereplacer` have been moved and their entry points +renamed. As part of this change the `simpleplugins` namespace was dropped, so we migrate the existing nodes. + +Revision ID: django_0019 +Revises: django_0018 + +""" +from alembic import op + +revision = 'django_0019' +down_revision = 'django_0018' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute( + """ + UPDATE db_dbnode SET type = 'calculation.job.arithmetic.add.ArithmeticAddCalculation.' + WHERE type = 'calculation.job.simpleplugins.arithmetic.add.ArithmeticAddCalculation.'; + + UPDATE db_dbnode SET type = 'calculation.job.templatereplacer.TemplatereplacerCalculation.'
+ WHERE type = 'calculation.job.simpleplugins.templatereplacer.TemplatereplacerCalculation.'; + + UPDATE db_dbnode SET process_type = 'aiida.calculations:arithmetic.add' + WHERE process_type = 'aiida.calculations:simpleplugins.arithmetic.add'; + + UPDATE db_dbnode SET process_type = 'aiida.calculations:templatereplacer' + WHERE process_type = 'aiida.calculations:simpleplugins.templatereplacer'; + + UPDATE db_dbattribute AS a SET tval = 'arithmetic.add' + FROM db_dbnode AS n WHERE a.dbnode_id = n.id + AND a.key = 'input_plugin' + AND a.tval = 'simpleplugins.arithmetic.add' + AND n.type = 'data.code.Code.'; + + UPDATE db_dbattribute AS a SET tval = 'templatereplacer' + FROM db_dbnode AS n WHERE a.dbnode_id = n.id + AND a.key = 'input_plugin' + AND a.tval = 'simpleplugins.templatereplacer' + AND n.type = 'data.code.Code.'; + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0019.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0020_provenance_redesign.py b/aiida/backends/sqlalchemy/migrations/versions/django_0020_provenance_redesign.py new file mode 100644 index 0000000000..12b152b0fe --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0020_provenance_redesign.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Implement the provenance redesign. + +This includes: + +1. Rename the type column of process nodes +2. Remove illegal links +3. 
Rename link types + +Note, this is almost identical to sqlalchemy migration `239cea6d2452` + +Revision ID: django_0020 +Revises: django_0019 + +""" +from alembic import op + +revision = 'django_0020' +down_revision = 'django_0019' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + from aiida.backends.sqlalchemy.migrations.utils import provenance_redesign + + # Migrate calculation nodes by inferring the process type from the type string + provenance_redesign.migrate_infer_calculation_entry_point(op) + + # Detect if the database contains any unexpected links + provenance_redesign.detect_unexpected_links(op) + + op.execute( + """ + DELETE FROM db_dblink WHERE db_dblink.id IN ( + SELECT db_dblink.id FROM db_dblink + INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id + WHERE + (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%') + AND db_dblink.type = 'returnlink' + ); -- Delete all outgoing RETURN links from JobCalculation and InlineCalculation nodes + + DELETE FROM db_dblink WHERE db_dblink.id IN ( + SELECT db_dblink.id FROM db_dblink + INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id + WHERE + (db_dbnode.type LIKE 'calculation.job%' OR db_dbnode.type LIKE 'calculation.inline%') + AND db_dblink.type = 'calllink' + ); -- Delete all outgoing CALL links from JobCalculation and InlineCalculation nodes + + DELETE FROM db_dblink WHERE db_dblink.id IN ( + SELECT db_dblink.id FROM db_dblink + INNER JOIN db_dbnode ON db_dblink.input_id = db_dbnode.id + WHERE + (db_dbnode.type LIKE 'calculation.function%' OR db_dbnode.type LIKE 'calculation.work%') + AND db_dblink.type = 'createlink' + ); -- Delete all outgoing CREATE links from FunctionCalculation and WorkCalculation nodes + + UPDATE db_dbnode SET type = 'calculation.work.WorkCalculation.' + WHERE type = 'calculation.process.ProcessCalculation.'; + -- First migrate very old `ProcessCalculation` to `WorkCalculation` + + UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.' FROM db_dbattribute + WHERE db_dbattribute.dbnode_id = db_dbnode.id + AND type = 'calculation.work.WorkCalculation.' + AND db_dbattribute.key = 'function_name'; + -- WorkCalculations that have a `function_name` attribute are FunctionCalculations + + UPDATE db_dbnode SET type = 'node.process.workflow.workchain.WorkChainNode.' + WHERE type = 'calculation.work.WorkCalculation.'; + -- Update type for `WorkCalculation` nodes - all that is left should be `WorkChainNodes` + + UPDATE db_dbnode SET type = 'node.process.calculation.calcjob.CalcJobNode.' + WHERE type LIKE 'calculation.job.%'; -- Update type for JobCalculation nodes + + UPDATE db_dbnode SET type = 'node.process.calculation.calcfunction.CalcFunctionNode.' + WHERE type = 'calculation.inline.InlineCalculation.'; -- Update type for InlineCalculation nodes + + UPDATE db_dbnode SET type = 'node.process.workflow.workfunction.WorkFunctionNode.'
+ WHERE type = 'calculation.function.FunctionCalculation.'; -- Update type for FunctionCalculation nodes + + UPDATE db_dblink SET type = 'create' WHERE type = 'createlink'; -- Rename `createlink` to `create` + UPDATE db_dblink SET type = 'return' WHERE type = 'returnlink'; -- Rename `returnlink` to `return` + + UPDATE db_dblink SET type = 'input_calc' FROM db_dbnode + WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%' + AND db_dblink.type = 'inputlink'; + -- Rename `inputlink` to `input_calc` if the target node is a calculation type node + + UPDATE db_dblink SET type = 'input_work' FROM db_dbnode + WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%' + AND db_dblink.type = 'inputlink'; + -- Rename `inputlink` to `input_work` if the target node is a workflow type node + + UPDATE db_dblink SET type = 'call_calc' FROM db_dbnode + WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.calculation%' + AND db_dblink.type = 'calllink'; + -- Rename `calllink` to `call_calc` if the target node is a calculation type node + + UPDATE db_dblink SET type = 'call_work' FROM db_dbnode + WHERE db_dblink.output_id = db_dbnode.id AND db_dbnode.type LIKE 'node.process.workflow%' + AND db_dblink.type = 'calllink'; + -- Rename `calllink` to `call_work` if the target node is a workflow type node + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + # The exact reverse operation is not possible because the renaming of the type string of `JobCalculation` nodes is + # done in a lossy way. Originally this type string contained the exact subclass of the `JobCalculation`, but in the + # migration this is changed to always be `node.process.calculation.calcjob.CalcJobNode.`. + # In the reverse operation, this can then only be reset to `calculation.job.JobCalculation.` + # but the information on the exact subclass is lost. + raise NotImplementedError('Downgrade of django_0020.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0021_dbgroup_name_to_label_type_to_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/django_0021_dbgroup_name_to_label_type_to_type_string.py new file mode 100644 index 0000000000..8d9cb67a92 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0021_dbgroup_name_to_label_type_to_type_string.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Renames `db_dbgroup.name`/`db_dbgroup.type` -> `db_dbgroup.label`/`db_dbgroup.type_string` + +Note, this is similar to sqlalchemy migration b8b23ddefad4 + +Revision ID: django_0021 +Revises: django_0020 + +""" +from alembic import op + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations + +revision = 'django_0021' +down_revision = 'django_0020' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # drop old constraint and indexes + reflect = ReflectMigrations(op) + reflect.drop_unique_constraints('db_dbgroup', ['name', 'type']) + reflect.drop_indexes('db_dbgroup', 'name') + reflect.drop_indexes('db_dbgroup', 'type') + + # renaming + op.alter_column('db_dbgroup', 'name', new_column_name='label') + op.alter_column('db_dbgroup', 'type', new_column_name='type_string') + + # create new constraint and indexes + # note the naming here is actually incorrect, but inherited from the django migrations + op.create_unique_constraint('db_dbgroup_name_type_12656f33_uniq', 'db_dbgroup', ['label', 'type_string']) + op.create_index('db_dbgroup_name_66c75272', 'db_dbgroup', ['label']) + op.create_index( + 'db_dbgroup_name_66c75272_like', + 'db_dbgroup', + ['label'], + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ) + op.create_index('db_dbgroup_type_23b2a748', 'db_dbgroup', ['type_string']) + op.create_index( + 'db_dbgroup_type_23b2a748_like', + 'db_dbgroup', + ['type_string'], + postgresql_using='btree', + postgresql_ops={'type_string': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0021.') diff --git a/aiida/backends/djsite/db/migrations/0022_dbgroup_type_string_change_content.py b/aiida/backends/sqlalchemy/migrations/versions/django_0022_dbgroup_type_string_change_content.py similarity index 50% rename from aiida/backends/djsite/db/migrations/0022_dbgroup_type_string_change_content.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0022_dbgroup_type_string_change_content.py index 643c52ac95..6542123fa9 100644 --- a/aiida/backends/djsite/db/migrations/0022_dbgroup_type_string_change_content.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0022_dbgroup_type_string_change_content.py @@ -7,16 +7,21 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name -"""Migration after the update of group_types""" +# pylint: disable=invalid-name,no-member +"""Rename `db_dbgroup.type_string`. -# pylint: disable=no-name-in-module,import-error -from django.db import migrations +Note this is identical to sqlalchemy migration e72ad251bcdb.
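+
+For illustration, the plain SQL below is equivalent to this dict-driven sketch (assumes
+``conn`` is an open connection; the mapping is taken from ``forward_sql``)::
+
+    import sqlalchemy as sa
+
+    TYPE_STRING_MAP = {
+        '': 'user',
+        'data.upf.family': 'data.upf',
+        'aiida.import': 'auto.import',
+        'autogroup.run': 'auto.run',
+    }
+
+    group = sa.table('db_dbgroup', sa.column('type_string'))
+    for old, new in TYPE_STRING_MAP.items():
+        conn.execute(group.update().where(group.c.type_string == old).values(type_string=new))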
-from aiida.backends.djsite.db.migrations import upgrade_schema_version +Revision ID: django_0022 +Revises: django_0021 -REVISION = '1.0.22' -DOWN_REVISION = '1.0.21' +""" +from alembic import op + +revision = 'django_0022' +down_revision = 'django_0021' +branch_labels = None +depends_on = None forward_sql = [ """UPDATE db_dbgroup SET type_string = 'user' WHERE type_string = '';""", @@ -25,21 +30,12 @@ """UPDATE db_dbgroup SET type_string = 'auto.run' WHERE type_string = 'autogroup.run';""", ] -reverse_sql = [ - """UPDATE db_dbgroup SET type_string = '' WHERE type_string = 'user';""", - """UPDATE db_dbgroup SET type_string = 'data.upf.family' WHERE type_string = 'data.upf';""", - """UPDATE db_dbgroup SET type_string = 'aiida.import' WHERE type_string = 'auto.import';""", - """UPDATE db_dbgroup SET type_string = 'autogroup.run' WHERE type_string = 'auto.run';""", -] +def upgrade(): + """Migrations for the upgrade.""" + op.execute('\n'.join(forward_sql)) -class Migration(migrations.Migration): - """Migration after the update of group_types""" - dependencies = [ - ('db', '0021_dbgroup_name_to_label_type_to_type_string'), - ] - operations = [ - migrations.RunSQL(sql='\n'.join(forward_sql), reverse_sql='\n'.join(reverse_sql)), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0022.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0023_calc_job_option_attribute_keys.py b/aiida/backends/sqlalchemy/migrations/versions/django_0023_calc_job_option_attribute_keys.py new file mode 100644 index 0000000000..d7f3a862b5 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0023_calc_job_option_attribute_keys.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Rename `ProcessNode` attributes for metadata options whose key changed + +Renamed attribute keys: + + * `custom_environment_variables` -> `environment_variables` (CalcJobNode) + * `jobresource_params` -> `resources` (CalcJobNode) + * `_process_label` -> `process_label` (ProcessNode) + * `parser` -> `parser_name` (CalcJobNode) + +Deleted attributes: + * `linkname_retrieved` (We do not actually delete it just in case someone relies on it) + +Note this is similar to the sqlalchemy migration 7ca08c391c49 + +Revision ID: django_0023 +Revises: django_0022 + +""" +from alembic import op + +revision = 'django_0023' +down_revision = 'django_0022' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.execute( + r""" + UPDATE db_dbattribute AS attribute + SET key = regexp_replace(attribute.key, '^custom_environment_variables', 'environment_variables') + FROM db_dbnode AS node + WHERE + ( + attribute.key = 'custom_environment_variables' OR + attribute.key LIKE 'custom\_environment\_variables.%' + ) AND + node.type = 'node.process.calculation.calcjob.CalcJobNode.'
AND + node.id = attribute.dbnode_id; + -- custom_environment_variables -> environment_variables + + UPDATE db_dbattribute AS attribute + SET key = regexp_replace(attribute.key, '^jobresource_params', 'resources') + FROM db_dbnode AS node + WHERE + ( + attribute.key = 'jobresource_params' OR + attribute.key LIKE 'jobresource\_params.%' + ) AND + node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND + node.id = attribute.dbnode_id; + -- jobresource_params -> resources + + UPDATE db_dbattribute AS attribute + SET key = regexp_replace(attribute.key, '^_process_label', 'process_label') + FROM db_dbnode AS node + WHERE + attribute.key = '_process_label' AND + node.type LIKE 'node.process.%' AND + node.id = attribute.dbnode_id; + -- _process_label -> process_label + + UPDATE db_dbattribute AS attribute + SET key = regexp_replace(attribute.key, '^parser', 'parser_name') + FROM db_dbnode AS node + WHERE + attribute.key = 'parser' AND + node.type = 'node.process.calculation.calcjob.CalcJobNode.' AND + node.id = attribute.dbnode_id; + -- parser -> parser_name + """ + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0023.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0024a_dblog_update.py b/aiida/backends/sqlalchemy/migrations/versions/django_0024a_dblog_update.py new file mode 100644 index 0000000000..14b5fac79f --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0024a_dblog_update.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Clean the log records from non-Node entity records (part a). + +It removes the legacy workflow records, and records that correspond to an unknown entity, from the +``DbLog`` table, and places them in corresponding files. + +Note this migration is similar to the sqlalchemy migration 041a79fc615f + ea2f50e7f615 + +Revision ID: django_0024a +Revises: django_0023 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from aiida.backends.sqlalchemy.migrations.utils.dblog_update import export_and_clean_workflow_logs, set_new_uuid + +revision = 'django_0024a' +down_revision = 'django_0023' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + connection = op.get_bind() + + # Clean data + export_and_clean_workflow_logs(connection, op.get_context().opts['aiida_profile']) + + # Note, we could also remove objpk and objname from the metadata dictionary here, + # but since this is not yet a JSONB column, it would be a costly operation, so we skip it for now.
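+    # (Illustrative aside: once `metadata` has been converted to JSONB in a later revision,
+    # the legacy keys could be dropped in bulk with a single statement, for example
+    # ``UPDATE db_dblog SET metadata = metadata - 'objpk' - 'objname';``.)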
+ + # Create a new column, which is a foreign key to the dbnode table + op.add_column( + 'db_dblog', sa.Column('dbnode_id', sa.INTEGER(), autoincrement=False, nullable=False, server_default='1') + ) + + # Transfer data to dbnode_id from objpk + connection.execute(sa.text("""UPDATE db_dblog SET dbnode_id=objpk""")) + + # Create the foreign key constraint and index + op.create_foreign_key( + 'db_dblog_dbnode_id_da34b732_fk_db_dbnode_id', + 'db_dblog', + 'db_dbnode', ['dbnode_id'], ['id'], + initially='DEFERRED', + deferrable=True + # note, the django migration added on_delete='CASCADE', however, this does not actually set it on the database, + # see: https://stackoverflow.com/a/35780859/5033292 + ) + op.create_index('db_dblog_dbnode_id_da34b732', 'db_dblog', ['dbnode_id'], unique=False) + + # Now that all the data have been migrated, remove the server default, and unnecessary columns + op.alter_column('db_dblog', 'dbnode_id', server_default=None) + op.drop_column('db_dblog', 'objpk') + op.drop_column('db_dblog', 'objname') + + # Create the UUID column, with a default UUID value + op.add_column( + 'db_dblog', + sa.Column( + 'uuid', + postgresql.UUID(), + nullable=False, + server_default='f6a16ff7-4a31-11eb-be7b-8344edc8f36b', + ) + ) + op.alter_column('db_dblog', 'uuid', server_default=None) + + # Set unique uuids on the column rows + set_new_uuid(connection) + + # we now want to set the unique constraint + # however, this gives: cannot ALTER TABLE "db_dblog" because it has pending trigger events + # so we do this in a follow up migration (which takes place in a new transaction) + # op.create_unique_constraint('db_dblog_uuid_9cf77df3_uniq', 'db_dblog', ['uuid']) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0024a.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0024b_dblog_update.py b/aiida/backends/sqlalchemy/migrations/versions/django_0024b_dblog_update.py new file mode 100644 index 0000000000..042e601816 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0024b_dblog_update.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Clean the log records from non-Node entity records (part b). + +We need to add the unique constraint on the `uuid` column in a new transaction. 
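+
+The uuid values themselves were assigned row by row in part a; a minimal sketch of that kind
+of helper (the real implementation is ``set_new_uuid`` in
+``aiida.backends.sqlalchemy.migrations.utils.dblog_update`` and may differ in detail)::
+
+    from uuid import uuid4
+
+    import sqlalchemy as sa
+
+    def assign_new_uuids(conn):
+        """Give every row in db_dblog a distinct uuid."""
+        for pk, in conn.execute(sa.text('SELECT id FROM db_dblog')).all():
+            conn.execute(
+                sa.text('UPDATE db_dblog SET uuid = :uuid WHERE id = :id'),
+                {'uuid': str(uuid4()), 'id': pk},
+            )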
+ +Revision ID: django_0024 +Revises: django_0024a + +""" +from alembic import op + +revision = 'django_0024' +down_revision = 'django_0024a' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.create_unique_constraint('db_dblog_uuid_9cf77df3_uniq', 'db_dblog', ['uuid']) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0024.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0025_move_data_within_node_module.py b/aiida/backends/sqlalchemy/migrations/versions/django_0025_move_data_within_node_module.py new file mode 100644 index 0000000000..94b6acc4fb --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0025_move_data_within_node_module.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Change type string for `Data` nodes, from `data.*` to `node.data.*` + +Note, this is identical to sqlalchemy migration 6a5c2ea1439d + +Revision ID: django_0025 +Revises: django_0024 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0025' +down_revision = 'django_0024' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + + # The type string for `Data` nodes changed from `data.*` to `node.data.*`. + statement = sa.text( + r""" + UPDATE db_dbnode + SET type = regexp_replace(type, '^data.', 'node.data.') + WHERE type LIKE 'data.%' + """ + ) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0025.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0026_trajectory_symbols_to_attribute.py b/aiida/backends/sqlalchemy/migrations/versions/django_0026_trajectory_symbols_to_attribute.py new file mode 100644 index 0000000000..23aa368862 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0026_trajectory_symbols_to_attribute.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Move trajectory symbols from repository array to attribute + +Note, this is similar to the sqlalchemy migration 12536798d4d3 + +Revision ID: django_0026 +Revises: django_0025 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from aiida.backends.sqlalchemy.migrations.utils.create_dbattribute import create_rows +from aiida.backends.sqlalchemy.migrations.utils.utils import load_numpy_array_from_repository + +revision = 'django_0026' +down_revision = 'django_0025' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + connection = op.get_bind() + profile = op.get_context().opts['aiida_profile'] + repo_path = profile.repository_path + + node_model = sa.table( + 'db_dbnode', + sa.column('id', sa.Integer), + sa.column('uuid', postgresql.UUID), + sa.column('type', sa.String), + ) + + nodes = connection.execute( + sa.select(node_model.c.id, node_model.c.uuid).where( + node_model.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.') + ) + ).all() + + for node_id, uuid in nodes: + value = load_numpy_array_from_repository(repo_path, uuid, 'symbols').tolist() + for row in create_rows('symbols', value, node_id): + connection.execute(sa.insert(sa.table('db_dbattribute', *(sa.column(key) for key in row))).values(**row)) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0026.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0027_delete_trajectory_symbols_array.py b/aiida/backends/sqlalchemy/migrations/versions/django_0027_delete_trajectory_symbols_array.py new file mode 100644 index 0000000000..bcc76bd773 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0027_delete_trajectory_symbols_array.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Delete trajectory symbols array from the repository and the reference in the attributes. 
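+
+For context, the preceding revision flattened the ``symbols`` list into the EAV attribute
+table, one row per element plus a ``list`` row recording the length; a rough sketch of that
+shape, simplified to text values only (the real ``create_rows`` handles all datatypes)::
+
+    def list_to_eav_rows(key, values, dbnode_id):
+        """Flatten a list attribute into db_dbattribute-style row dicts."""
+        rows = [{'dbnode_id': dbnode_id, 'key': key, 'datatype': 'list', 'ival': len(values)}]
+        for index, value in enumerate(values):
+            rows.append({'dbnode_id': dbnode_id, 'key': f'{key}.{index}', 'datatype': 'txt', 'tval': str(value)})
+        return rows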
+ +Note, this is similar to the sqlalchemy migration ce56d84bcc35 + +Revision ID: django_0027 +Revises: django_0026 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +from sqlalchemy.sql.expression import delete + +from aiida.backends.sqlalchemy.migrations.utils import utils + +revision = 'django_0027' +down_revision = 'django_0026' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # pylint: disable=unused-variable + connection = op.get_bind() + profile = op.get_context().opts['aiida_profile'] + repo_path = profile.repository_path + + node_tbl = sa.table( + 'db_dbnode', + sa.column('id', sa.Integer), + sa.column('uuid', postgresql.UUID), + sa.column('type', sa.String), + # sa.column('attributes', JSONB), + ) + + nodes = connection.execute( + sa.select(node_tbl.c.id, node_tbl.c.uuid).where( + node_tbl.c.type == op.inline_literal('node.data.array.trajectory.TrajectoryData.') + ) + ).all() + + attr_tbl = sa.table('db_dbattribute', sa.column('key')) + + for pk, uuid in nodes: + connection.execute(delete(attr_tbl).where(sa.and_(node_tbl.c.id == pk, attr_tbl.c.key == 'array|symbols'))) + connection.execute( + delete(attr_tbl).where(sa.and_(node_tbl.c.id == pk, attr_tbl.c.key.startswith('array|symbols.'))) + ) + utils.delete_numpy_array_from_repository(repo_path, uuid, 'symbols') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0027.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0028_remove_node_prefix.py b/aiida/backends/sqlalchemy/migrations/versions/django_0028_remove_node_prefix.py new file mode 100644 index 0000000000..ec60db5df5 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0028_remove_node_prefix.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Remove the `node.` prefix from `db_dbnode.type` + +Note, this is identical to the sqlalchemy migration 61fc0913fae9. 
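+
+Both statements below follow the same anchored ``regexp_replace`` pattern; a generic sketch
+(helper name hypothetical; assumes ``conn`` is an open connection)::
+
+    import sqlalchemy as sa
+
+    def strip_type_prefix(conn, old, new):
+        """Rewrite db_dbnode.type values starting with ``old`` to start with ``new``."""
+        conn.execute(
+            sa.text('UPDATE db_dbnode SET type = regexp_replace(type, :pattern, :new) WHERE type LIKE :like'),
+            {'pattern': f'^{old}', 'new': new, 'like': f'{old}%'},
+        )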
+ +Revision ID: django_0028 +Revises: django_0027 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0028' +down_revision = 'django_0027' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + + # The `node.` prefix is being dropped from the node type string + statement = sa.text( + r""" + UPDATE db_dbnode + SET type = regexp_replace(type, '^node.data.', 'data.') + WHERE type LIKE 'node.data.%'; + + UPDATE db_dbnode + SET type = regexp_replace(type, '^node.process.', 'process.') + WHERE type LIKE 'node.process.%'; + """ + ) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0028.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0029_rename_parameter_data_to_dict.py b/aiida/backends/sqlalchemy/migrations/versions/django_0029_rename_parameter_data_to_dict.py new file mode 100644 index 0000000000..d0aa44f533 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0029_rename_parameter_data_to_dict.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Rename `db_dbnode.type` values `data.parameter.ParameterData.` to `data.dict.Dict.` + +Note this is identical to migration d254fdfed416 + +Revision ID: django_0029 +Revises: django_0028 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0029' +down_revision = 'django_0028' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + + statement = sa.text( + r""" + UPDATE db_dbnode SET type = 'data.dict.Dict.' WHERE type = 'data.parameter.ParameterData.'; + """ + ) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0029.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0030_dbnode_type_to_dbnode_node_type.py b/aiida/backends/sqlalchemy/migrations/versions/django_0030_dbnode_type_to_dbnode_node_type.py new file mode 100644 index 0000000000..b9e4cd9464 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0030_dbnode_type_to_dbnode_node_type.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Rename `db_dbnode.type` to `db_dbnode.node_type` + +This is similar to migration 5ddd24e52864 + +Revision ID: django_0030 +Revises: django_0029 + +""" +from alembic import op + +revision = 'django_0030' +down_revision = 'django_0029' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.alter_column('db_dbnode', 'type', new_column_name='node_type') # pylint: disable=no-member + # note index names are (mistakenly) not changed here + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0030.') diff --git a/aiida/backends/djsite/db/migrations/0012_drop_dblock.py b/aiida/backends/sqlalchemy/migrations/versions/django_0031_remove_dbcomputer_enabled.py similarity index 55% rename from aiida/backends/djsite/db/migrations/0012_drop_dblock.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0031_remove_dbcomputer_enabled.py index affaa90007..b063e02cc9 100644 --- a/aiida/backends/djsite/db/migrations/0012_drop_dblock.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0031_remove_dbcomputer_enabled.py @@ -7,21 +7,28 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations +# pylint: disable=invalid-name,no-member +"""Remove `db_dbcomputer.enabled` -from aiida.backends.djsite.db.migrations import upgrade_schema_version +This is similar to migration 3d6190594e19 -REVISION = '1.0.12' -DOWN_REVISION = '1.0.11' +Revision ID: django_0031 +Revises: django_0030 +""" +from alembic import op -class Migration(migrations.Migration): - """Database migration.""" +revision = 'django_0031' +down_revision = 'django_0030' +branch_labels = None +depends_on = None - dependencies = [ - ('db', '0011_delete_kombu_tables'), - ] - operations = [migrations.DeleteModel(name='DbLock',), upgrade_schema_version(REVISION, DOWN_REVISION)] +def upgrade(): + """Migrations for the upgrade.""" + op.drop_column('db_dbcomputer', 'enabled') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0031.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0032_remove_legacy_workflows.py b/aiida/backends/sqlalchemy/migrations/versions/django_0032_remove_legacy_workflows.py new file mode 100644 index 0000000000..bcde831f7c --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0032_remove_legacy_workflows.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Remove legacy workflows + +This is similar to migration 1b8ed3425af9 + +Revision ID: django_0032 +Revises: django_0031 + +""" +from alembic import op + +from aiida.backends.sqlalchemy.migrations.utils.legacy_workflows import export_workflow_data + +revision = 'django_0032' +down_revision = 'django_0031' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # Clean data + export_workflow_data(op.get_bind(), op.get_context().opts['aiida_profile']) + + # drop tables (indexes are also automatically dropped) + op.drop_table('db_dbworkflowstep_sub_workflows') + op.drop_table('db_dbworkflowstep_calculations') + op.drop_table('db_dbworkflowstep') + op.drop_table('db_dbworkflowdata') + op.drop_table('db_dbworkflow') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0032.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0033_replace_text_field_with_json_field.py b/aiida/backends/sqlalchemy/migrations/versions/django_0033_replace_text_field_with_json_field.py new file mode 100644 index 0000000000..06508bb413 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0033_replace_text_field_with_json_field.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Replace serialized dict text fields with JSONB + +Revision ID: django_0033 +Revises: django_0032 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'django_0033' +down_revision = 'django_0032' +branch_labels = None +depends_on = None + +FIELDS = ( + ('db_dbauthinfo', 'metadata'), + ('db_dbauthinfo', 'auth_params'), + ('db_dbcomputer', 'metadata'), + ('db_dbcomputer', 'transport_params'), + ('db_dblog', 'metadata'), +) + + +def upgrade(): + """Migrations for the upgrade.""" + for table_name, column in FIELDS: + op.alter_column( + table_name, column, existing_type=sa.TEXT, type_=postgresql.JSONB, postgresql_using=f'{column}::jsonb' + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0033.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0034_drop_node_columns_nodeversion_public.py b/aiida/backends/sqlalchemy/migrations/versions/django_0034_drop_node_columns_nodeversion_public.py new file mode 100644 index 0000000000..087e8421d8 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0034_drop_node_columns_nodeversion_public.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. 
# +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Drop `db_dbnode.nodeversion` and `db_dbnode.public` + +This is similar to migration 1830c8430131 + +Revision ID: django_0034 +Revises: django_0033 + +""" +from alembic import op + +revision = 'django_0034' +down_revision = 'django_0033' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.drop_column('db_dbnode', 'nodeversion') + op.drop_column('db_dbnode', 'public') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0034.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0035_simplify_user_model.py b/aiida/backends/sqlalchemy/migrations/versions/django_0035_simplify_user_model.py new file mode 100644 index 0000000000..c7ab3bcfbf --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0035_simplify_user_model.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Simplify `db_dbuser`, by dropping unnecessary columns and join tables + +These columns were part of the default Django user model + +This migration is similar to de2eaf6978b4 + +Revision ID: django_0035 +Revises: django_0034 + +""" +from alembic import op + +revision = 'django_0035' +down_revision = 'django_0034' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.drop_column('db_dbuser', 'date_joined') + op.drop_column('db_dbuser', 'is_active') + op.drop_column('db_dbuser', 'is_staff') + op.drop_column('db_dbuser', 'is_superuser') + op.drop_column('db_dbuser', 'last_login') + op.drop_column('db_dbuser', 'password') + op.drop_table('db_dbuser_groups') + op.drop_table('db_dbuser_user_permissions') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0035.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0036_drop_computer_transport_params.py b/aiida/backends/sqlalchemy/migrations/versions/django_0036_drop_computer_transport_params.py new file mode 100644 index 0000000000..b0400f8288 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0036_drop_computer_transport_params.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Drop `db_dbcomputer.transport_params` + +This is similar to migration 07fac78e6209 + +Revision ID: django_0036 +Revises: django_0035 + +""" +from alembic import op + +revision = 'django_0036' +down_revision = 'django_0035' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.drop_column('db_dbcomputer', 'transport_params') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0036.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0037_attributes_extras_settings_json.py b/aiida/backends/sqlalchemy/migrations/versions/django_0037_attributes_extras_settings_json.py new file mode 100644 index 0000000000..945d18efe3 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0037_attributes_extras_settings_json.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Move `db_dbattribute`/`db_dbextra` to `db_dbnode.attributes`/`db_dbnode.extras`, and add `dbsetting.val` + +Revision ID: django_0037 +Revises: django_0036 + +""" +import math + +from alembic import op +import sqlalchemy as sa +from sqlalchemy import cast, func, select +from sqlalchemy.dialects import postgresql +from sqlalchemy.sql import column, table + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations +from aiida.cmdline.utils import echo +from aiida.common.progress_reporter import get_progress_reporter +from aiida.common.timezone import datetime_to_isoformat + +revision = 'django_0037' +down_revision = 'django_0036' +branch_labels = None +depends_on = None + +node_tbl = table( + 'db_dbnode', + column('id'), + column('attributes', postgresql.JSONB(astext_type=sa.Text())), + column('extras', postgresql.JSONB(astext_type=sa.Text())), +) + +attr_tbl = table( + 'db_dbattribute', + column('id'), + column('dbnode_id'), + column('key'), + column('datatype'), + column('tval'), + column('ival'), + column('fval'), + column('dval'), + column('bval'), +) + +extra_tbl = table( + 'db_dbextra', + column('id'), + column('dbnode_id'), + column('key'), + column('datatype'), + column('tval'), + column('ival'), + column('fval'), + column('dval'), + column('bval'), +) + +setting_tbl = table( + 'db_dbsetting', + column('id'), + column('description'), + column('time'), + column('key'), + column('datatype'), + column('tval'), + column('ival'), + column('fval'), + column('dval'), + column('bval'), + column('val'), +) + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + + op.add_column('db_dbnode', sa.Column('attributes', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + op.add_column('db_dbnode', sa.Column('extras', 
postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + + # transition attributes and extras to node + node_count = conn.execute(select(func.count()).select_from(node_tbl)).scalar() + if node_count: + with get_progress_reporter()(total=node_count, desc='Updating attributes and extras') as progress: + for node in conn.execute(select(node_tbl)).all(): + attr_list = conn.execute(select(attr_tbl).where(attr_tbl.c.dbnode_id == node.id)).all() + attributes, _ = attributes_to_dict(sorted(attr_list, key=lambda a: a.key)) + extra_list = conn.execute(select(extra_tbl).where(extra_tbl.c.dbnode_id == node.id)).all() + extras, _ = attributes_to_dict(sorted(extra_list, key=lambda a: a.key)) + conn.execute( + node_tbl.update().where(node_tbl.c.id == node.id).values(attributes=attributes, extras=extras) + ) + progress.update() + + op.drop_table('db_dbattribute') + op.drop_table('db_dbextra') + + op.add_column('db_dbsetting', sa.Column('val', postgresql.JSONB(astext_type=sa.Text()), nullable=True)) + + # transition settings + setting_count = conn.execute(select(func.count()).select_from(setting_tbl)).scalar() + if setting_count: + with get_progress_reporter()(total=setting_count, desc='Updating settings') as progress: + for setting in conn.execute(select(setting_tbl)).all(): + dt = setting.datatype + val = None + if dt == 'txt': + val = setting.tval + elif dt == 'float': + val = setting.fval + if math.isnan(val) or math.isinf(val): + val = str(val) + elif dt == 'int': + val = setting.ival + elif dt == 'bool': + val = setting.bval + elif dt == 'date': + val = datetime_to_isoformat(setting.dval) + conn.execute( + setting_tbl.update().where(setting_tbl.c.id == setting.id + ).values(val=cast(val, postgresql.JSONB(astext_type=sa.Text()))) + ) + progress.update() + + op.drop_column('db_dbsetting', 'tval') + op.drop_column('db_dbsetting', 'fval') + op.drop_column('db_dbsetting', 'ival') + op.drop_column('db_dbsetting', 'bval') + op.drop_column('db_dbsetting', 'dval') + op.drop_column('db_dbsetting', 'datatype') + + ReflectMigrations(op).drop_indexes('db_dbsetting', 'key') # db_dbsetting_key_1b84beb4 + op.create_index( + 'db_dbsetting_key_1b84beb4_like', + 'db_dbsetting', + ['key'], + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0037.') + + +def attributes_to_dict(attr_list: list): + """ + Transform the attributes of a node into a dictionary. It assumes the keys + are ordered alphabetically, and that they all belong to the same node. + """ + d = {} + + error = False + for a in attr_list: + try: + tmp_d = select_from_key(a.key, d) + except ValueError: + echo.echo_error(f"Couldn't transfer attribute {a.id} with key {a.key} for dbnode {a.dbnode_id}") + error = True + continue + key = a.key.split('.')[-1] + + if isinstance(tmp_d, (list, tuple)): + key = int(key) + + dt = a.datatype + + if dt == 'dict': + tmp_d[key] = {} + elif dt == 'list': + tmp_d[key] = [None] * a.ival + else: + val = None + if dt == 'txt': + val = a.tval + elif dt == 'float': + val = a.fval + if math.isnan(val) or math.isinf(val): + val = str(val) + elif dt == 'int': + val = a.ival + elif dt == 'bool': + val = a.bval + elif dt == 'date': + val = datetime_to_isoformat(a.dval) + + tmp_d[key] = val + + return d, error + + +def select_from_key(key, d): + """ + Return the element of the dict to do the insertion on. If it is foo.1.bar, it + will return d["foo"][1]. If it is only foo, it will return d directly.
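+
+    Illustrative example (hypothetical data; the containing list must already exist)::
+
+        >>> select_from_key('foo.1.bar', {'foo': [None, {'bar': 0}]})
+        {'bar': 0}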
+    """
+    path = key.split('.')[:-1]
+
+    tmp_d = d
+    for p in path:
+        if isinstance(tmp_d, (list, tuple)):
+            tmp_d = tmp_d[int(p)]
+        else:
+            tmp_d = tmp_d[p]
+
+    return tmp_d
diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0038_data_migration_legacy_job_calculations.py b/aiida/backends/sqlalchemy/migrations/versions/django_0038_data_migration_legacy_job_calculations.py
new file mode 100644
index 0000000000..66c45b62ff
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0038_data_migration_legacy_job_calculations.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name,no-member,line-too-long
+"""Migrate legacy `JobCalculations`.
+
+These old nodes have already been migrated to the correct `CalcJobNode` type in a previous migration, but they can
+still contain a `state` attribute with a deprecated `JobCalcState` value and they are missing a value for the
+`process_state`, `process_status`, `process_label` and `exit_status`. The `process_label` cannot be inferred
+consistently in SQL, so it is omitted from the mapping below. The others will be mapped from the `state` attribute as follows:
+
+.. code-block:: text
+
+    Old state            | Process state  | Exit status | Process status
+    ---------------------|----------------|-------------|----------------------------------------------------------
+    `NEW`                | `Killed`       | `None`      | Legacy `JobCalculation` with state `NEW`
+    `TOSUBMIT`           | `Killed`       | `None`      | Legacy `JobCalculation` with state `TOSUBMIT`
+    `SUBMITTING`         | `Killed`       | `None`      | Legacy `JobCalculation` with state `SUBMITTING`
+    `WITHSCHEDULER`      | `Killed`       | `None`      | Legacy `JobCalculation` with state `WITHSCHEDULER`
+    `COMPUTED`           | `Killed`       | `None`      | Legacy `JobCalculation` with state `COMPUTED`
+    `RETRIEVING`         | `Killed`       | `None`      | Legacy `JobCalculation` with state `RETRIEVING`
+    `PARSING`            | `Killed`       | `None`      | Legacy `JobCalculation` with state `PARSING`
+    `SUBMISSIONFAILED`   | `Excepted`     | `None`      | Legacy `JobCalculation` with state `SUBMISSIONFAILED`
+    `RETRIEVALFAILED`    | `Excepted`     | `None`      | Legacy `JobCalculation` with state `RETRIEVALFAILED`
+    `PARSINGFAILED`      | `Excepted`     | `None`      | Legacy `JobCalculation` with state `PARSINGFAILED`
+    `FAILED`             | `Finished`     | 2           | -
+    `FINISHED`           | `Finished`     | 0           | -
+    `IMPORTED`           | -              | -           | -
+
+
+Note the `IMPORTED` state was never actually stored in the `state` attribute, so we do not have to consider it.
+The old `state` attribute has to be removed after the data is migrated, because its value is no longer valid or useful.
+
+Note: in addition to the three attributes mentioned in the table, all matched nodes will get `Legacy JobCalculation` as
+their `process_label`, which is one of the default columns of `verdi process list`.
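+
+For example, a node whose attributes contain `"state": "FAILED"` would, after the migration, end up with
+(illustrative):
+
+.. code-block:: text
+
+    {"process_state": "finished", "exit_status": 2, "process_label": "Legacy JobCalculation"}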
+
+This migration is identical to 26d561acd560
+
+Revision ID: django_0038
+Revises: django_0037
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = 'django_0038'
+down_revision = 'django_0037'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Migrations for the upgrade."""
+    conn = op.get_bind()  # pylint: disable=no-member
+
+    # Note that the condition on matching target nodes is done only on the `node_type` and the `state` attribute value.
+    # New `CalcJobs` will have the same node type and, while they are active, can have a `state` attribute with a value
+    # of the enum `CalcJobState`, some of which match the deprecated `JobCalcState`; however, the new ones are stored
+    # in lower case, so we do not run the risk of matching them by accident.
+    statement = sa.text(
+        """
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `NEW`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "NEW"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `TOSUBMIT`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "TOSUBMIT"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `SUBMITTING`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "SUBMITTING"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `WITHSCHEDULER`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "WITHSCHEDULER"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `COMPUTED`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "COMPUTED"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `RETRIEVING`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVING"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `PARSING`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSING"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `SUBMISSIONFAILED`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.'
AND attributes @> '{"state": "SUBMISSIONFAILED"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `RETRIEVALFAILED`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVALFAILED"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `PARSINGFAILED`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSINGFAILED"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 2, "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "FAILED"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 0, "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "FINISHED"}';
+        """
+    )
+    conn.execute(statement)
+
+
+def downgrade():
+    """Migrations for the downgrade."""
+    raise NotImplementedError('Downgrade of django_0038.')
diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0039_reset_hash.py b/aiida/backends/sqlalchemy/migrations/versions/django_0039_reset_hash.py
new file mode 100644
index 0000000000..12a80f703f
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0039_reset_hash.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name,no-member
+"""Invalidating node hashes
+
+Users should rehash nodes for caching
+
+Revision ID: django_0039
+Revises: django_0038
+
+"""
+from alembic import op
+
+from aiida.backends.sqlalchemy.migrations.utils.integrity import drop_hashes
+
+revision = 'django_0039'
+down_revision = 'django_0038'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Migrations for the upgrade."""
+    drop_hashes(op.get_bind())  # pylint: disable=no-member
+
+
+def downgrade():
+    """Migrations for the downgrade."""
+    drop_hashes(op.get_bind())  # pylint: disable=no-member
diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0040_data_migration_legacy_process_attributes.py b/aiida/backends/sqlalchemy/migrations/versions/django_0040_data_migration_legacy_process_attributes.py
new file mode 100644
index 0000000000..3d59c021cf
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0040_data_migration_legacy_process_attributes.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name,no-member
+"""Migrate some legacy process attributes.
+
+Attribute keys that are renamed:
+
+ * `_sealed` -> `sealed`
+
+Attribute keys that are removed entirely:
+
+ * `_finished`
+ * `_failed`
+ * `_aborted`
+ * `_do_abort`
+
+Finally, after these first migrations, any remaining process nodes that still do not have a `sealed` attribute will
+have it set to `True`. Excluded are the nodes that have a `process_state` attribute of one of the active states
+`created`, `running` or `waiting`, because those are actual valid active processes that are not yet sealed.
+
+This is identical to migration e734dd5e50d7
+
+Revision ID: django_0040
+Revises: django_0039
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = 'django_0040'
+down_revision = 'django_0039'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Migrations for the upgrade."""
+    conn = op.get_bind()
+
+    statement = sa.text(
+        """
+        UPDATE db_dbnode
+        SET attributes = jsonb_set(attributes, '{"sealed"}', attributes->'_sealed')
+        WHERE attributes ? '_sealed' AND node_type LIKE 'process.%';
+        -- Copy `_sealed` -> `sealed`
+
+        UPDATE db_dbnode SET attributes = attributes - '_sealed'
+        WHERE attributes ? '_sealed' AND node_type LIKE 'process.%';
+        -- Delete `_sealed`
+
+        UPDATE db_dbnode SET attributes = attributes - '_finished'
+        WHERE attributes ? '_finished' AND node_type LIKE 'process.%';
+        -- Delete `_finished`
+
+        UPDATE db_dbnode SET attributes = attributes - '_failed'
+        WHERE attributes ? '_failed' AND node_type LIKE 'process.%';
+        -- Delete `_failed`
+
+        UPDATE db_dbnode SET attributes = attributes - '_aborted'
+        WHERE attributes ? '_aborted' AND node_type LIKE 'process.%';
+        -- Delete `_aborted`
+
+        UPDATE db_dbnode SET attributes = attributes - '_do_abort'
+        WHERE attributes ? '_do_abort' AND node_type LIKE 'process.%';
+        -- Delete `_do_abort`
+
+        UPDATE db_dbnode
+        SET attributes = jsonb_set(attributes, '{"sealed"}', to_jsonb(True))
+        WHERE
+            node_type LIKE 'process.%' AND
+            NOT (attributes ? 'sealed') AND
+            attributes->>'process_state' NOT IN ('created', 'running', 'waiting');
+        -- Set `sealed=True` for process nodes that do not yet have a `sealed` attribute AND are not in an active state
+        """
+    )
+    conn.execute(statement)
+
+
+def downgrade():
+    """Migrations for the downgrade."""
+    raise NotImplementedError('Downgrade of django_0040.')
diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0041_seal_unsealed_processes.py b/aiida/backends/sqlalchemy/migrations/versions/django_0041_seal_unsealed_processes.py
new file mode 100644
index 0000000000..d53ceec90c
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0041_seal_unsealed_processes.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name,no-member
+"""Seal any process nodes that have not yet been sealed but should.
+
+This should have been accomplished by the last step in the previous migration, but because the WHERE clause was
+incorrect, not all nodes that should have been targeted were included. The problem is with the statement:
+
+    attributes->>'process_state' NOT IN ('created', 'running', 'waiting')
+
+This clause excludes nodes where the attribute `process_state` does not exist at all: in that case
+`attributes->>'process_state'` is `NULL`, and `NULL NOT IN (...)` evaluates to `NULL` rather than `TRUE`, so those
+rows were skipped. This is the case for legacy calculations like `InlineCalculation` nodes. Their node type was
+already migrated in `0020` but most of them will be unsealed.
+
+This is identical to migration 7b38a9e783e7
+
+Revision ID: django_0041
+Revises: django_0040
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+revision = 'django_0041'
+down_revision = 'django_0040'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Migrations for the upgrade."""
+    conn = op.get_bind()
+
+    statement = sa.text(
+        """
+        UPDATE db_dbnode
+        SET attributes = jsonb_set(attributes, '{"sealed"}', to_jsonb(True))
+        WHERE
+            node_type LIKE 'process.%' AND
+            NOT attributes ? 'sealed' AND
+            NOT (
+                attributes ? 'process_state' AND
+                attributes->>'process_state' IN ('created', 'running', 'waiting')
+            );
+        -- Set `sealed=True` for process nodes that do not yet have a `sealed` attribute AND are not in an active state
+        -- It is important to check that `process_state` exists at all before doing the IN check.
+        """
+    )
+    conn.execute(statement)
+
+
+def downgrade():
+    """Migrations for the downgrade."""
+    raise NotImplementedError('Downgrade of django_0041.')
diff --git a/aiida/backends/djsite/db/migrations/0042_prepare_schema_reset.py b/aiida/backends/sqlalchemy/migrations/versions/django_0042_prepare_schema_reset.py
similarity index 58%
rename from aiida/backends/djsite/db/migrations/0042_prepare_schema_reset.py
rename to aiida/backends/sqlalchemy/migrations/versions/django_0042_prepare_schema_reset.py
index ce825a6fee..8593a62d70 100644
--- a/aiida/backends/djsite/db/migrations/0042_prepare_schema_reset.py
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0042_prepare_schema_reset.py
@@ -7,25 +7,27 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
-# pylint: disable=invalid-name
-"""Prepare the schema reset."""
+# pylint: disable=invalid-name,no-member
+"""Prepare schema reset.
-# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed
-# pylint: disable=no-name-in-module,import-error
-from django.db import migrations
+This is similar to migration 91b573400be5
-from aiida.backends.djsite.db.migrations import upgrade_schema_version
+Revision ID: django_0042
+Revises: django_0041
-REVISION = '1.0.42'
-DOWN_REVISION = '1.0.41'
+"""
+from alembic import op
+import sqlalchemy as sa
+revision = 'django_0042'
+down_revision = 'django_0041'
+branch_labels = None
+depends_on = None
-class Migration(migrations.Migration):
-    """Prepare the schema reset."""
-    dependencies = [
-        ('db', '0041_seal_unsealed_processes'),
-    ]
+def upgrade():
+    """Migrations for the upgrade."""
+    conn = op.get_bind()
     # The following statement is trying to perform an UPSERT, i.e. an UPDATE of a given key or if it doesn't exist fall
     # back to an INSERT. This problem is notoriously difficult to solve as explained in great detail in this article:
@@ -33,14 +35,16 @@ class Migration(migrations.Migration):
     # through the `ON CONFLICT` keyword, but since we also support 9.4 we cannot use it here. The snippet used below
     # taken from the provided link, is not safe for concurrent operations, but since our migrations always run in an
     # isolated way, we do not suffer from those problems and can safely use it.
-    operations = [
-        migrations.RunSQL(
-            sql=r"""
-                INSERT INTO db_dbsetting (key, val, description, time)
-                SELECT 'schema_generation', '"1"', 'Database schema generation', NOW()
-                WHERE NOT EXISTS (SELECT * FROM db_dbsetting WHERE key = 'schema_generation');
-                """,
-            reverse_sql=''
-        ),
-        upgrade_schema_version(REVISION, DOWN_REVISION)
-    ]
+    statement = sa.text(
+        """
+        INSERT INTO db_dbsetting (key, val, description, time)
+        SELECT 'schema_generation', '"1"', 'Database schema generation', NOW()
+        WHERE NOT EXISTS (SELECT * FROM db_dbsetting WHERE key = 'schema_generation');
+        """
+    )
+    conn.execute(statement)
+
+
+def downgrade():
+    """Migrations for the downgrade."""
+    raise NotImplementedError('Downgrade of django_0042.')
diff --git a/aiida/backends/djsite/db/migrations/0043_default_link_label.py b/aiida/backends/sqlalchemy/migrations/versions/django_0043_default_link_label.py
similarity index 51%
rename from aiida/backends/djsite/db/migrations/0043_default_link_label.py
rename to aiida/backends/sqlalchemy/migrations/versions/django_0043_default_link_label.py
index e98d730be5..5fd52c2aa5 100644
--- a/aiida/backends/djsite/db/migrations/0043_default_link_label.py
+++ b/aiida/backends/sqlalchemy/migrations/versions/django_0043_default_link_label.py
@@ -7,36 +7,40 @@
 # For further information on the license, see the LICENSE.txt file        #
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
-# pylint: disable=invalid-name
-"""Update all link labels with the value `_return` which is the legacy default single link label.
+# pylint: disable=invalid-name,no-member
+"""Update all link labels with the value `_return`
+
+This is the legacy default single link label. The old process functions used `_return` as the default link label;
+however, labels that start or end with an underscore are illegal, because underscores are reserved for namespacing.
-""" - -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from django.db import migrations - -from aiida.backends.djsite.db.migrations import upgrade_schema_version -REVISION = '1.0.43' -DOWN_REVISION = '1.0.42' +This is identical to migration 118349c10896 +Revision ID: django_0043 +Revises: django_0042 -class Migration(migrations.Migration): - """Migrate.""" - - dependencies = [ - ('db', '0042_prepare_schema_reset'), - ] - - operations = [ - migrations.RunSQL( - sql=r""" - UPDATE db_dblink SET label='result' WHERE label = '_return'; - """, - reverse_sql='' - ), - upgrade_schema_version(REVISION, DOWN_REVISION) - ] +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0043' +down_revision = 'django_0042' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + statement = sa.text(""" + UPDATE db_dblink SET label='result' WHERE label = '_return'; + """) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + statement = sa.text(""" + UPDATE db_dblink SET label='_result' WHERE label = 'return'; + """) + op.get_bind().execute(statement) diff --git a/aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py b/aiida/backends/sqlalchemy/migrations/versions/django_0044_dbgroup_type_string.py similarity index 52% rename from aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py rename to aiida/backends/sqlalchemy/migrations/versions/django_0044_dbgroup_type_string.py index 553131407f..02530a0ae4 100644 --- a/aiida/backends/djsite/db/migrations/0044_dbgroup_type_string.py +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0044_dbgroup_type_string.py @@ -7,16 +7,20 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name -"""Migration after the `Group` class became pluginnable and so the group `type_string` changed.""" +# pylint: disable=invalid-name,no-member +"""Migration after the `Group` class became pluginnable and so the group `type_string` changed. 
-# pylint: disable=no-name-in-module,import-error -from django.db import migrations +Revision ID: django_0044 +Revises: django_0043 -from aiida.backends.djsite.db.migrations import upgrade_schema_version +""" +from alembic import op +import sqlalchemy as sa -REVISION = '1.0.44' -DOWN_REVISION = '1.0.43' +revision = 'django_0044' +down_revision = 'django_0043' +branch_labels = None +depends_on = None forward_sql = [ """UPDATE db_dbgroup SET type_string = 'core' WHERE type_string = 'user';""", @@ -25,21 +29,14 @@ """UPDATE db_dbgroup SET type_string = 'core.auto' WHERE type_string = 'auto.run';""", ] -reverse_sql = [ - """UPDATE db_dbgroup SET type_string = 'user' WHERE type_string = 'core';""", - """UPDATE db_dbgroup SET type_string = 'data.upf' WHERE type_string = 'core.upf';""", - """UPDATE db_dbgroup SET type_string = 'auto.import' WHERE type_string = 'core.import';""", - """UPDATE db_dbgroup SET type_string = 'auto.run' WHERE type_string = 'core.auto';""", -] +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + statement = sa.text('\n'.join(forward_sql)) + conn.execute(statement) -class Migration(migrations.Migration): - """Migration after the update of group `type_string`""" - dependencies = [ - ('db', '0043_default_link_label'), - ] - operations = [ - migrations.RunSQL(sql='\n'.join(forward_sql), reverse_sql='\n'.join(reverse_sql)), - upgrade_schema_version(REVISION, DOWN_REVISION), - ] +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0044.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0045_dbgroup_extras.py b/aiida/backends/sqlalchemy/migrations/versions/django_0045_dbgroup_extras.py new file mode 100644 index 0000000000..ee6e4b10e1 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0045_dbgroup_extras.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Migration to add the `extras` JSONB column to the `DbGroup` model. + +Revision ID: django_0045 +Revises: django_0044 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'django_0045' +down_revision = 'django_0044' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + # We add the column with a `server_default` because otherwise the migration would fail since existing rows will not + # have a value and violate the not-nullable clause. However, the model doesn't use a server default but a default + # on the ORM level, so we remove the server default from the column directly after. 
+ op.add_column( + 'db_dbgroup', sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=False, server_default='{}') + ) + op.alter_column('db_dbgroup', 'extras', server_default=None) + + +def downgrade(): + """Migrations for the downgrade.""" + op.drop_column('db_dbgroup', 'extras') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0046_add_node_repository_metadata.py b/aiida/backends/sqlalchemy/migrations/versions/django_0046_add_node_repository_metadata.py new file mode 100644 index 0000000000..6d322441de --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0046_add_node_repository_metadata.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Add the `db_dbnode.repository_metadata` JSONB column. + +Revision ID: django_0046 +Revises: django_0045 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'django_0046' +down_revision = 'django_0045' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.add_column( + 'db_dbnode', + sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False, server_default='{}') + ) + op.alter_column('db_dbnode', 'repository_metadata', server_default=None) + + +def downgrade(): + """Migrations for the downgrade.""" + op.drop_column('db_dbnode', 'repository_metadata') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0047_migrate_repository.py b/aiida/backends/sqlalchemy/migrations/versions/django_0047_migrate_repository.py new file mode 100644 index 0000000000..9ae077fe7f --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0047_migrate_repository.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Migrate the file repository to the new disk object store based implementation. 
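+
+The actual work is delegated to `aiida.backends.sqlalchemy.migrations.utils.migrate_repository`; note that this
+migration is not reversible.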
+ +Revision ID: django_0047 +Revises: django_0046 + +""" +from alembic import op + +revision = 'django_0047' +down_revision = 'django_0046' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + from aiida.backends.sqlalchemy.migrations.utils.migrate_repository import migrate_repository + + migrate_repository(op.get_bind(), op.get_context().opts['aiida_profile']) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Migration of the file repository is not reversible.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0048_computer_name_to_label.py b/aiida/backends/sqlalchemy/migrations/versions/django_0048_computer_name_to_label.py new file mode 100644 index 0000000000..311502f945 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0048_computer_name_to_label.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Rename `db_dbcomputer.name` to `db_dbcomputer.label` + +Revision ID: django_0048 +Revises: django_0047 + +""" +from alembic import op + +from aiida.backends.sqlalchemy.migrations.utils import ReflectMigrations + +revision = 'django_0048' +down_revision = 'django_0047' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + reflect = ReflectMigrations(op) + reflect.drop_unique_constraints('db_dbcomputer', ['name']) # db_dbcomputer_name_key + reflect.drop_indexes('db_dbcomputer', 'name') # db_dbcomputer_name_f1800b1a_like + op.alter_column('db_dbcomputer', 'name', new_column_name='label') + op.create_unique_constraint('db_dbcomputer_label_bc480bab_uniq', 'db_dbcomputer', ['label']) + op.create_index( + 'db_dbcomputer_label_bc480bab_like', + 'db_dbcomputer', + ['label'], + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0048.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0049_entry_point_core_prefix.py b/aiida/backends/sqlalchemy/migrations/versions/django_0049_entry_point_core_prefix.py new file mode 100644 index 0000000000..b1a32ad123 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0049_entry_point_core_prefix.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member,line-too-long +"""Update node types after `core.` prefix was added to entry point names. 
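+
+For example, `data.dict.Dict.` becomes `data.core.dict.Dict.` and the scheduler type `slurm` becomes `core.slurm`;
+the full mapping is spelled out in the SQL below.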
+ +Revision ID: django_0049 +Revises: django_0048 + +""" +from alembic import op +import sqlalchemy as sa + +revision = 'django_0049' +down_revision = 'django_0048' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + conn = op.get_bind() + statement = sa.text( + """ + UPDATE db_dbnode SET node_type = 'data.core.array.ArrayData.' WHERE node_type = 'data.array.ArrayData.'; + UPDATE db_dbnode SET node_type = 'data.core.array.bands.BandsData.' WHERE node_type = 'data.array.bands.BandsData.'; + UPDATE db_dbnode SET node_type = 'data.core.array.kpoints.KpointsData.' WHERE node_type = 'data.array.kpoints.KpointsData.'; + UPDATE db_dbnode SET node_type = 'data.core.array.projection.ProjectionData.' WHERE node_type = 'data.array.projection.ProjectionData.'; + UPDATE db_dbnode SET node_type = 'data.core.array.trajectory.TrajectoryData.' WHERE node_type = 'data.array.trajectory.TrajectoryData.'; + UPDATE db_dbnode SET node_type = 'data.core.array.xy.XyData.' WHERE node_type = 'data.array.xy.XyData.'; + UPDATE db_dbnode SET node_type = 'data.core.base.BaseData.' WHERE node_type = 'data.base.BaseData.'; + UPDATE db_dbnode SET node_type = 'data.core.bool.Bool.' WHERE node_type = 'data.bool.Bool.'; + UPDATE db_dbnode SET node_type = 'data.core.cif.CifData.' WHERE node_type = 'data.cif.CifData.'; + UPDATE db_dbnode SET node_type = 'data.core.code.Code.' WHERE node_type = 'data.code.Code.'; + UPDATE db_dbnode SET node_type = 'data.core.dict.Dict.' WHERE node_type = 'data.dict.Dict.'; + UPDATE db_dbnode SET node_type = 'data.core.float.Float.' WHERE node_type = 'data.float.Float.'; + UPDATE db_dbnode SET node_type = 'data.core.folder.FolderData.' WHERE node_type = 'data.folder.FolderData.'; + UPDATE db_dbnode SET node_type = 'data.core.int.Int.' WHERE node_type = 'data.int.Int.'; + UPDATE db_dbnode SET node_type = 'data.core.list.List.' WHERE node_type = 'data.list.List.'; + UPDATE db_dbnode SET node_type = 'data.core.numeric.NumericData.' WHERE node_type = 'data.numeric.NumericData.'; + UPDATE db_dbnode SET node_type = 'data.core.orbital.OrbitalData.' WHERE node_type = 'data.orbital.OrbitalData.'; + UPDATE db_dbnode SET node_type = 'data.core.remote.RemoteData.' WHERE node_type = 'data.remote.RemoteData.'; + UPDATE db_dbnode SET node_type = 'data.core.remote.stash.RemoteStashData.' WHERE node_type = 'data.remote.stash.RemoteStashData.'; + UPDATE db_dbnode SET node_type = 'data.core.remote.stash.folder.RemoteStashFolderData.' WHERE node_type = 'data.remote.stash.folder.RemoteStashFolderData.'; + UPDATE db_dbnode SET node_type = 'data.core.singlefile.SinglefileData.' WHERE node_type = 'data.singlefile.SinglefileData.'; + UPDATE db_dbnode SET node_type = 'data.core.str.Str.' WHERE node_type = 'data.str.Str.'; + UPDATE db_dbnode SET node_type = 'data.core.structure.StructureData.' WHERE node_type = 'data.structure.StructureData.'; + UPDATE db_dbnode SET node_type = 'data.core.upf.UpfData.' 
WHERE node_type = 'data.upf.UpfData.'; + UPDATE db_dbcomputer SET scheduler_type = 'core.direct' WHERE scheduler_type = 'direct'; + UPDATE db_dbcomputer SET scheduler_type = 'core.lsf' WHERE scheduler_type = 'lsf'; + UPDATE db_dbcomputer SET scheduler_type = 'core.pbspro' WHERE scheduler_type = 'pbspro'; + UPDATE db_dbcomputer SET scheduler_type = 'core.sge' WHERE scheduler_type = 'sge'; + UPDATE db_dbcomputer SET scheduler_type = 'core.slurm' WHERE scheduler_type = 'slurm'; + UPDATE db_dbcomputer SET scheduler_type = 'core.torque' WHERE scheduler_type = 'torque'; + UPDATE db_dbcomputer SET transport_type = 'core.local' WHERE transport_type = 'local'; + UPDATE db_dbcomputer SET transport_type = 'core.ssh' WHERE transport_type = 'ssh'; + UPDATE db_dbnode SET process_type = 'aiida.calculations:core.arithmetic.add' WHERE process_type = 'aiida.calculations:arithmetic.add'; + UPDATE db_dbnode SET process_type = 'aiida.calculations:core.templatereplacer' WHERE process_type = 'aiida.calculations:templatereplacer'; + UPDATE db_dbnode SET process_type = 'aiida.workflows:core.arithmetic.add_multiply' WHERE process_type = 'aiida.workflows:arithmetic.add_multiply'; + UPDATE db_dbnode SET process_type = 'aiida.workflows:core.arithmetic.multiply_add' WHERE process_type = 'aiida.workflows:arithmetic.multiply_add'; + UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"parser_name"}', '"core.arithmetic.add"') WHERE attributes->>'parser_name' = 'arithmetic.add'; + UPDATE db_dbnode SET attributes = jsonb_set(attributes, '{"parser_name"}', '"core.templatereplacer.doubler"') WHERE attributes->>'parser_name' = 'templatereplacer.doubler'; + """ + ) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0049.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/django_0050_sqlalchemy_parity.py b/aiida/backends/sqlalchemy/migrations/versions/django_0050_sqlalchemy_parity.py new file mode 100644 index 0000000000..e090306887 --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/django_0050_sqlalchemy_parity.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member,line-too-long +"""Finalise parity of the legacy django branch with the sqlalchemy branch. + +1. Remove and recreate all (non-unique) indexes, with standard names and postgresql ops. +2. Remove and recreate all unique constraints, with standard names. +3. Remove and recreate all foreign key constraints, with standard names and other rules. +4. Drop the django specific tables + +It is of note that a number of foreign keys were missing comparable `ON DELETE` rules in django. 
+This is because django does not currently add these rules to the database, but instead tries to handle them on the +Python side, see: https://stackoverflow.com/a/35780859/5033292 + +Revision ID: django_0050 +Revises: django_0049 + +""" +from alembic import op + +from aiida.backends.sqlalchemy.migrations.utils.parity import synchronize_schemas + +revision = 'django_0050' +down_revision = 'django_0049' +branch_labels = None +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + synchronize_schemas(op) + + for tbl_name in ( + 'auth_group_permissions', 'auth_permission', 'auth_group', 'django_content_type', 'django_migrations' + ): + op.execute(f'DROP TABLE IF EXISTS {tbl_name} CASCADE') + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of django_0050.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/e15ef2630a1b_initial_schema.py b/aiida/backends/sqlalchemy/migrations/versions/e15ef2630a1b_initial_schema.py index c5d55dbf8f..5e877fa5a9 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/e15ef2630a1b_initial_schema.py +++ b/aiida/backends/sqlalchemy/migrations/versions/e15ef2630a1b_initial_schema.py @@ -25,7 +25,7 @@ # revision identifiers, used by Alembic. revision = 'e15ef2630a1b' down_revision = None -branch_labels = None +branch_labels = ('sqlalchemy',) depends_on = None diff --git a/aiida/backends/sqlalchemy/migrations/versions/e72ad251bcdb_dbgroup_class_change_type_string_values.py b/aiida/backends/sqlalchemy/migrations/versions/e72ad251bcdb_dbgroup_class_change_type_string_values.py index dc5ee00764..57eec0e2b1 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/e72ad251bcdb_dbgroup_class_change_type_string_values.py +++ b/aiida/backends/sqlalchemy/migrations/versions/e72ad251bcdb_dbgroup_class_change_type_string_values.py @@ -41,12 +41,14 @@ def upgrade(): + """Migrations for the upgrade.""" conn = op.get_bind() statement = text('\n'.join(forward_sql)) conn.execute(statement) def downgrade(): + """Migrations for the downgrade.""" conn = op.get_bind() statement = text('\n'.join(reverse_sql)) conn.execute(statement) diff --git a/aiida/backends/sqlalchemy/migrations/versions/e734dd5e50d7_data_migration_legacy_process_attributes.py b/aiida/backends/sqlalchemy/migrations/versions/e734dd5e50d7_data_migration_legacy_process_attributes.py index 7b73c85547..deeb7e8e33 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/e734dd5e50d7_data_migration_legacy_process_attributes.py +++ b/aiida/backends/sqlalchemy/migrations/versions/e734dd5e50d7_data_migration_legacy_process_attributes.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member -"""Data migration for some legacy process attributes. +"""Migrate some legacy process attributes. Attribute keys that are renamed: @@ -25,6 +25,8 @@ it set to `True`. Excluding the nodes that have a `process_state` attribute of one of the active states `created`, running` or `waiting`, because those are actual valid active processes that are not yet sealed. 
+This is identical to migration django_0040 + Revision ID: e734dd5e50d7 Revises: e797afa09270 Create Date: 2019-07-04 18:23:56.127994 @@ -88,3 +90,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of e734dd5e50d7.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py b/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py index 85be4c22fa..c8c2fed58a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py +++ b/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py @@ -8,7 +8,9 @@ # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name -"""Invalidating node hash - User should rehash nodes for caching +"""Invalidating node hash + +Users should rehash nodes for caching Revision ID: e797afa09270 Revises: 26d561acd560 @@ -16,11 +18,8 @@ """ from alembic import op -# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed -# pylint: disable=no-name-in-module,import-error -from sqlalchemy.sql import text -from aiida.cmdline.utils import echo +from aiida.backends.sqlalchemy.migrations.utils.integrity import drop_hashes # revision identifiers, used by Alembic. revision = 'e797afa09270' @@ -28,22 +27,6 @@ branch_labels = None depends_on = None -# Currently valid hash key -_HASH_EXTRA_KEY = '_aiida_hash' - - -def drop_hashes(conn): # pylint: disable=unused-argument - """Drop hashes of nodes. - - Print warning only if the DB actually contains nodes. - """ - n_nodes = conn.execute(text("""SELECT count(*) FROM db_dbnode;""")).fetchall()[0][0] - if n_nodes > 0: - echo.echo_warning('Invalidating the hashes of all nodes. Please run "verdi rehash".', bold=True) - - statement = text(f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];") - conn.execute(statement) - def upgrade(): """drop the hashes when upgrading""" diff --git a/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py b/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py index c0b20bdfa3..a7aed64a55 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py +++ b/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py @@ -10,11 +10,12 @@ # pylint: disable=invalid-name,no-member,no-name-in-module,import-error """This migration creates UUID column and populates it with distinct UUIDs -This migration corresponds to the 0024_dblog_update Django migration. +This migration corresponds to the 0024_dblog_update Django migration (only the final part). Revision ID: ea2f50e7f615 Revises: 041a79fc615f -Create Date: 2019-01-30 19:22:50.984380""" +Create Date: 2019-01-30 19:22:50.984380 +""" from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql @@ -26,38 +27,11 @@ depends_on = None -def set_new_uuid(connection): - """ - Set new and distinct UUIDs to all the logs - """ - from aiida.common.utils import get_new_uuid - - # Exit if there are no rows - e.g. 
initial setup - id_query = connection.execute(sa.text('SELECT db_dblog.id FROM db_dblog')) - if id_query.rowcount == 0: - return - - id_res = id_query.fetchall() - ids = [] - for (curr_id,) in id_res: - ids.append(curr_id) - uuids = set() - while len(uuids) < len(ids): - uuids.add(get_new_uuid()) - - # Create the key/value pairs - key_values = ','.join(f"({curr_id}, '{curr_uuid}')" for curr_id, curr_uuid in zip(ids, uuids)) - - update_stm = f""" - UPDATE db_dblog as t SET - uuid = uuid(c.uuid) - from (values {key_values}) as c(id, uuid) where c.id = t.id""" - connection.execute(sa.text(update_stm)) - - def upgrade(): """ Add an UUID column an populate it with unique UUIDs """ + from aiida.backends.sqlalchemy.migrations.utils.dblog_update import set_new_uuid from aiida.common.utils import get_new_uuid + connection = op.get_bind() # Create the UUID column diff --git a/aiida/backends/sqlalchemy/migrations/versions/f9a69de76a9a_delete_kombu_tables.py b/aiida/backends/sqlalchemy/migrations/versions/f9a69de76a9a_delete_kombu_tables.py index a6543778a4..10ff453aa8 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/f9a69de76a9a_delete_kombu_tables.py +++ b/aiida/backends/sqlalchemy/migrations/versions/f9a69de76a9a_delete_kombu_tables.py @@ -50,4 +50,4 @@ def upgrade(): def downgrade(): """Migrations for the downgrade.""" - print('There is no downgrade for the deletion of the kombu tables and the daemon timestamps') + raise NotImplementedError('Deletion of the kombu tables is not reversible.') diff --git a/aiida/backends/sqlalchemy/migrations/versions/main_0001_initial.py b/aiida/backends/sqlalchemy/migrations/versions/main_0001_initial.py new file mode 100644 index 0000000000..86382e700c --- /dev/null +++ b/aiida/backends/sqlalchemy/migrations/versions/main_0001_initial.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,no-member +"""Initial main branch schema + +This revision is compatible with the heads of the django and sqlalchemy branches. 
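+
+Databases at the head of either legacy branch are not migrated through the revision graph to this revision; instead
+the migrator re-stamps them on the main branch (see `PsqlDostoreMigrator.migrate`).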
+ +Revision ID: main_0001 +Revises: +Create Date: 2021-02-02 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +revision = 'main_0001' +down_revision = None +branch_labels = ('main',) +depends_on = None + + +def upgrade(): + """Migrations for the upgrade.""" + op.create_table( + 'db_dbcomputer', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('label', sa.String(length=255), nullable=False, unique=True), + sa.Column('hostname', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('scheduler_type', sa.String(length=255), nullable=False), + sa.Column('transport_type', sa.String(length=255), nullable=False), + sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + ) + op.create_index( + 'ix_pat_db_dbcomputer_label', + 'db_dbcomputer', ['label'], + unique=False, + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'} + ) + op.create_table( + 'db_dbsetting', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('key', sa.String(length=1024), nullable=False, unique=True), + sa.Column('val', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + ) + op.create_index( + 'ix_pat_db_dbsetting_key', + 'db_dbsetting', + ['key'], + unique=False, + postgresql_using='btree', + postgresql_ops={'key': 'varchar_pattern_ops'}, + ) + op.create_table( + 'db_dbuser', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('email', sa.String(length=254), nullable=False, unique=True), + sa.Column('first_name', sa.String(length=254), nullable=False), + sa.Column('last_name', sa.String(length=254), nullable=False), + sa.Column('institution', sa.String(length=254), nullable=False), + ) + op.create_index( + 'ix_pat_db_dbuser_email', + 'db_dbuser', + ['email'], + unique=False, + postgresql_using='btree', + postgresql_ops={'email': 'varchar_pattern_ops'}, + ) + op.create_table( + 'db_dbauthinfo', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('aiidauser_id', sa.Integer(), nullable=False, index=True), + sa.Column('dbcomputer_id', sa.Integer(), nullable=False, index=True), + sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('auth_params', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint( + ['aiidauser_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.UniqueConstraint('aiidauser_id', 'dbcomputer_id'), + ) + op.create_table( + 'db_dbgroup', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('type_string', sa.String(length=255), nullable=False, index=True), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + 
sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.UniqueConstraint('label', 'type_string'), + ) + op.create_index( + 'ix_pat_db_dbgroup_label', + 'db_dbgroup', + ['label'], + unique=False, + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ) + op.create_index( + 'ix_pat_db_dbgroup_type_string', + 'db_dbgroup', + ['type_string'], + unique=False, + postgresql_using='btree', + postgresql_ops={'type_string': 'varchar_pattern_ops'}, + ) + + op.create_table( + 'db_dbnode', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('node_type', sa.String(length=255), nullable=False, index=True), + sa.Column('process_type', sa.String(length=255), nullable=True, index=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False, index=True), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False, index=True), + sa.Column('attributes', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('extras', postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column('repository_metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('dbcomputer_id', sa.Integer(), nullable=True, index=True), + sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + ondelete='RESTRICT', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='restrict', + initially='DEFERRED', + deferrable=True, + ), + ) + op.create_index( + 'ix_pat_db_dbnode_label', + 'db_dbnode', + ['label'], + unique=False, + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ) + op.create_index( + 'ix_pat_db_dbnode_process_type', + 'db_dbnode', + ['process_type'], + unique=False, + postgresql_using='btree', + postgresql_ops={'process_type': 'varchar_pattern_ops'}, + ) + op.create_index( + 'ix_pat_db_dbnode_node_type', + 'db_dbnode', + ['node_type'], + unique=False, + postgresql_using='btree', + postgresql_ops={'node_type': 'varchar_pattern_ops'}, + ) + + op.create_table( + 'db_dbcomment', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False, index=True), + sa.Column('content', sa.Text(), nullable=False), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + + op.create_table( + 'db_dbgroup_dbnodes', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('dbgroup_id', sa.Integer(), nullable=False, index=True), + sa.ForeignKeyConstraint(['dbgroup_id'], ['db_dbgroup.id'], 
initially='DEFERRED', deferrable=True), + sa.ForeignKeyConstraint(['dbnode_id'], ['db_dbnode.id'], initially='DEFERRED', deferrable=True), + sa.UniqueConstraint('dbgroup_id', 'dbnode_id'), + ) + op.create_table( + 'db_dblink', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('input_id', sa.Integer(), nullable=False, index=True), + sa.Column('output_id', sa.Integer(), nullable=False, index=True), + sa.Column('label', sa.String(length=255), nullable=False, index=True), + sa.Column('type', sa.String(length=255), nullable=False, index=True), + sa.ForeignKeyConstraint(['input_id'], ['db_dbnode.id'], initially='DEFERRED', deferrable=True), + sa.ForeignKeyConstraint( + ['output_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + op.create_index( + 'ix_pat_db_dblink_label', + 'db_dblink', + ['label'], + unique=False, + postgresql_using='btree', + postgresql_ops={'label': 'varchar_pattern_ops'}, + ) + op.create_index( + 'ix_pat_db_dblink_type', + 'db_dblink', + ['type'], + unique=False, + postgresql_using='btree', + postgresql_ops={'type': 'varchar_pattern_ops'}, + ) + + op.create_table( + 'db_dblog', + sa.Column('id', sa.Integer(), nullable=False, primary_key=True), + sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False, unique=True), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('loggername', sa.String(length=255), nullable=False, index=True), + sa.Column('levelname', sa.String(length=50), nullable=False, index=True), + sa.Column('dbnode_id', sa.Integer(), nullable=False, index=True), + sa.Column('message', sa.Text(), nullable=False), + sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + ondelete='CASCADE', + initially='DEFERRED', + deferrable=True, + ), + ) + op.create_index( + 'ix_pat_db_dblog_levelname', + 'db_dblog', + ['levelname'], + unique=False, + postgresql_using='btree', + postgresql_ops={'levelname': 'varchar_pattern_ops'}, + ) + op.create_index( + 'ix_pat_db_dblog_loggername', + 'db_dblog', + ['loggername'], + unique=False, + postgresql_using='btree', + postgresql_ops={'loggername': 'varchar_pattern_ops'}, + ) + + +def downgrade(): + """Migrations for the downgrade.""" + raise NotImplementedError('Downgrade of main_0001.') diff --git a/aiida/backends/sqlalchemy/migrator.py b/aiida/backends/sqlalchemy/migrator.py new file mode 100644 index 0000000000..4812fdf34c --- /dev/null +++ b/aiida/backends/sqlalchemy/migrator.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Schema validation and migration utilities. + +This code interacts directly with the database, outside of the ORM, +taking a `Profile` as input for the connection configuration. + +.. important:: This code should only be accessed via the storage backend class, not directly! 
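+
+A minimal usage sketch (illustrative only)::
+
+    from aiida.manage.configuration import load_profile
+
+    profile = load_profile('my_profile')  # hypothetical profile name
+    migrator = PsqlDostoreMigrator(profile)
+    migrator.validate_storage()  # raises if the schema is not at the head version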
+""" +import contextlib +import os +import pathlib +from typing import ContextManager, Dict, Iterator, Optional + +from alembic.command import downgrade, upgrade +from alembic.config import Config +from alembic.runtime.environment import EnvironmentContext +from alembic.runtime.migration import MigrationContext, MigrationInfo +from alembic.script import ScriptDirectory +from disk_objectstore import Container +from sqlalchemy import String, Table, column, desc, insert, inspect, select, table +from sqlalchemy.exc import OperationalError, ProgrammingError +from sqlalchemy.ext.automap import automap_base +from sqlalchemy.future.engine import Connection +from sqlalchemy.orm import Session + +from aiida.backends.sqlalchemy.models.settings import DbSetting +from aiida.backends.sqlalchemy.utils import create_sqlalchemy_engine +from aiida.common import exceptions +from aiida.manage.configuration.profile import Profile + +TEMPLATE_LEGACY_DJANGO_SCHEMA = """ +Database schema is using the legacy Django schema. +To migrate the database schema version to the current one, run the following command: + + verdi -p {profile_name} storage migrate +""" + +TEMPLATE_INVALID_SCHEMA_VERSION = """ +Database schema version `{schema_version_database}` is incompatible with the required schema version `{schema_version_code}`. +To migrate the database schema version to the current one, run the following command: + + verdi -p {profile_name} storage migrate +""" + +ALEMBIC_REL_PATH = 'migrations' + +REPOSITORY_UUID_KEY = 'repository|uuid' + + +class PsqlDostoreMigrator: + """Class for validating and migrating `psql_dos` storage instances. + + .. important:: This class should only be accessed via the storage backend class (apart from for test purposes) + """ + + alembic_version_tbl_name = 'alembic_version' + django_version_table = table( + 'django_migrations', column('id'), column('app', String(255)), column('name', String(255)), column('applied') + ) + + def __init__(self, profile: Profile) -> None: + self.profile = profile + + @classmethod + def get_schema_versions(cls) -> Dict[str, str]: + """Return all available schema versions (oldest to latest). + + :return: schema version -> description + """ + return {entry.revision: entry.doc for entry in reversed(list(cls._alembic_script().walk_revisions()))} + + @classmethod + def get_schema_version_head(cls) -> str: + """Return the head schema version for this storage, i.e. the latest schema this storage can be migrated to.""" + return cls._alembic_script().revision_map.get_current_head('main') + + def _connection_context(self, connection: Optional[Connection] = None) -> ContextManager[Connection]: + """Return a context manager, with a connection to the database. + + :raises: `UnreachableStorage` if the database connection fails + """ + if connection is not None: + return contextlib.nullcontext(connection) + try: + return create_sqlalchemy_engine(self.profile.storage_config).connect() + except OperationalError as exception: + raise exceptions.UnreachableStorage(f'Could not connect to database: {exception}') from exception + + def get_schema_version_profile(self, _connection: Optional[Connection] = None, check_legacy=False) -> Optional[str]: + """Return the schema version of the backend instance for this profile. + + Note, the version will be None if the database is empty or is a legacy django database. 
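+
+        For a database stamped at the head of the main branch it would return, e.g., ``'main_0001'`` (illustrative).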
+ """ + with self._migration_context(_connection) as context: + version = context.get_current_revision() + if version is None and check_legacy: + with self._connection_context(_connection) as connection: + stmt = select(self.django_version_table.c.name).where(self.django_version_table.c.app == 'db') + stmt = stmt.order_by(desc(self.django_version_table.c.id)).limit(1) + try: + return connection.execute(stmt).scalar() + except (OperationalError, ProgrammingError): + connection.rollback() + return version + + def validate_storage(self) -> None: + """Validate that the storage for this profile + + 1. That the database schema is at the head version, i.e. is compatible with the code API. + 2. That the repository ID is equal to the UUID set in the database + + :raises: :class:`aiida.common.exceptions.UnreachableStorage` if the storage cannot be connected to + :raises: :class:`aiida.common.exceptions.IncompatibleStorageSchema` + if the storage is not compatible with the code API. + :raises: :class:`aiida.common.exceptions.CorruptStorage` + if the repository ID is not equal to the UUID set in thedatabase. + """ + with self._connection_context() as connection: + + # check there is an alembic_version table from which to get the schema version + if not inspect(connection).has_table(self.alembic_version_tbl_name): + # if not present, it might be that this is a legacy django database + if inspect(connection).has_table(self.django_version_table.name): + raise exceptions.IncompatibleStorageSchema( + TEMPLATE_LEGACY_DJANGO_SCHEMA.format(profile_name=self.profile.name) + ) + raise exceptions.IncompatibleStorageSchema('The database has no known version.') + + # now we can check that the alembic version is the latest + schema_version_code = self.get_schema_version_head() + schema_version_database = self.get_schema_version_profile(connection, check_legacy=False) + if schema_version_database != schema_version_code: + raise exceptions.IncompatibleStorageSchema( + TEMPLATE_INVALID_SCHEMA_VERSION.format( + schema_version_database=schema_version_database, + schema_version_code=schema_version_code, + profile_name=self.profile.name + ) + ) + + # check that we can access the disk-objectstore container, and get its id + filepath = pathlib.Path(self.profile.repository_path) / 'container' + container = Container(filepath) + try: + container_id = container.container_id + except Exception as exc: + raise exceptions.UnreachableStorage(f'Could not access disk-objectstore {filepath}: {exc}') from exc + + # finally, we check that the ID set within the disk-objectstore is equal to the one saved in the database, + # i.e. this container is indeed the one associated with the db + stmt = select(DbSetting.val).where(DbSetting.key == REPOSITORY_UUID_KEY) + repo_uuid = connection.execute(stmt).scalar_one_or_none() + if repo_uuid is None: + raise exceptions.CorruptStorage('The database has no repository UUID set.') + if repo_uuid != container_id: + raise exceptions.CorruptStorage( + f'The database has a repository UUID configured to {repo_uuid} ' + f'but the disk-objectstore\'s is {container_id}.' 
+ + def initialise(self) -> None: + """Generate the initial storage schema for this profile, from the ORM models.""" + from aiida.backends.sqlalchemy.models.base import get_orm_metadata + from aiida.orm.implementation.sqlalchemy.backend import CONTAINER_DEFAULTS + + # setup the database + # see: https://alembic.sqlalchemy.org/en/latest/cookbook.html#building-an-up-to-date-database-from-scratch + get_orm_metadata().create_all(create_sqlalchemy_engine(self.profile.storage_config)) + + # setup the repository + filepath = pathlib.Path(self.profile.repository_path) / 'container' + container = Container(filepath) + container.init_container(clear=True, **CONTAINER_DEFAULTS) + + with create_sqlalchemy_engine(self.profile.storage_config).begin() as conn: + # Create a "sync" between the database and repository, by saving the repository's UUID in the settings table; + # this allows us to detect inconsistencies between the two + conn.execute( + insert(DbSetting + ).values(key=REPOSITORY_UUID_KEY, val=container.container_id, description='Repository UUID') + ) + + # finally, generate the version table, "stamping" it with the most recent revision + with self._migration_context(conn) as context: + context.stamp(context.script, 'main@head') + + def migrate(self) -> None: + """Migrate the storage for this profile to the head version. + + :raises: :class:`~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed + """ + from aiida.cmdline.utils import echo + + # the database can be in one of a few states: + # 1. Completely empty -> we can simply initialise it with the current ORM schema + # 2. Legacy django database -> we transfer the version to alembic, migrate to the head of the django branch, + # reset the revision as one on the main branch, and then migrate to the head of the main branch + # 3. Legacy sqlalchemy database -> we migrate to the head of the sqlalchemy branch, + # reset the revision as one on the main branch, and then migrate to the head of the main branch + # 4.
Already on the main branch -> we migrate to the head of the main branch + + with self._connection_context() as connection: + if not inspect(connection).has_table(self.alembic_version_tbl_name): + if not inspect(connection).has_table(self.django_version_table.name): + # the database is assumed to be empty, so we need to initialise it + echo.echo_report('initialising empty storage schema') + self.initialise() + return + # the database is a legacy django one, + # so we need to copy the version from the 'django_migrations' table to the 'alembic_version' one + legacy_version = self.get_schema_version_profile(connection, check_legacy=True) + # the version should be of the format '00XX_description' + assert legacy_version is not None + assert legacy_version[:4].startswith('00') + version = f'django_{legacy_version[:4]}' + with self._migration_context(connection) as context: + context.stamp(context.script, version) + connection.commit() + # now we can continue with the migration as normal + else: + version = self.get_schema_version_profile(connection) + + # find what branch the current version is on + branches = self._alembic_script().revision_map.get_revision(version).branch_labels + + if 'django' in branches or 'sqlalchemy' in branches: + # migrate up to the top of the respective legacy branches + if 'django' in branches: + echo.echo_report('Migrating to the head of the legacy django branch') + self.migrate_up('django@head') + elif 'sqlalchemy' in branches: + echo.echo_report('Migrating to the head of the legacy sqlalchemy branch') + self.migrate_up('sqlalchemy@head') + # now re-stamp with the comparable revision on the main branch + with self._connection_context() as connection: + with self._migration_context(connection) as context: + context._ensure_version_table(purge=True) # pylint: disable=protected-access + context.stamp(context.script, 'main_0001') + connection.commit() + + # finally migrate to the main head revision + echo.echo_report('Migrating to the head of the main branch') + self.migrate_up('main@head') + + def migrate_up(self, version: str) -> None: + """Migrate the database up to a specific version. + + :param version: string with schema version to migrate to + """ + with self._alembic_connect() as config: + upgrade(config, version) + + def migrate_down(self, version: str) -> None: + """Migrate the database down to a specific version. + + :param version: string with schema version to migrate to + """ + with self._alembic_connect() as config: + downgrade(config, version) + + @staticmethod + def _alembic_config(): + """Return an instance of an Alembic `Config`.""" + dir_path = os.path.dirname(os.path.realpath(__file__)) + config = Config() + config.set_main_option('script_location', os.path.join(dir_path, ALEMBIC_REL_PATH)) + return config + + @classmethod + def _alembic_script(cls): + """Return an instance of an Alembic `ScriptDirectory`.""" + return ScriptDirectory.from_config(cls._alembic_config()) + + @contextlib.contextmanager + def _alembic_connect(self, _connection: Optional[Connection] = None): + """Context manager to return an instance of an Alembic configuration. + + The profile's database connection is added to the `attributes` property, through which it can then be + retrieved, for example in the `env.py` file, which is run when the database is migrated. + """ + with self._connection_context(_connection) as connection: + config = self._alembic_config() + config.attributes['connection'] = connection # pylint: disable=unsupported-assignment-operation + config.attributes['aiida_profile'] = self.profile # pylint: disable=unsupported-assignment-operation + + def _callback(step: MigrationInfo, **kwargs): # pylint: disable=unused-argument + """Callback to be called after a migration step is executed.""" + from aiida.cmdline.utils import echo + from_rev = step.down_revision_ids[0] if step.down_revision_ids else '' + echo.echo_report(f'- {from_rev} -> {step.up_revision_id}') + + config.attributes['on_version_apply'] = _callback # pylint: disable=unsupported-assignment-operation + + yield config
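To illustrate the hand-off described in the docstring above, a hypothetical `env.py` could pick the connection back out of `config.attributes` roughly as follows (a sketch following the alembic cookbook pattern, not the actual `env.py` of this patch):

```python
# Hypothetical alembic env.py fragment; `from alembic import context` only
# resolves when alembic itself executes this file.
from alembic import context


def run_migrations_online():
    # retrieve the connection stored by `_alembic_connect` above
    connection = context.config.attributes.get('connection', None)
    context.configure(connection=connection)
    with context.begin_transaction():
        context.run_migrations()


run_migrations_online()
```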
+ """ + with self._connection_context(_connection) as connection: + config = self._alembic_config() + config.attributes['connection'] = connection # pylint: disable=unsupported-assignment-operation + config.attributes['aiida_profile'] = self.profile # pylint: disable=unsupported-assignment-operation + + def _callback(step: MigrationInfo, **kwargs): # pylint: disable=unused-argument + """Callback to be called after a migration step is executed.""" + from aiida.cmdline.utils import echo + from_rev = step.down_revision_ids[0] if step.down_revision_ids else '' + echo.echo_report(f'- {from_rev} -> {step.up_revision_id}') + + config.attributes['on_version_apply'] = _callback # pylint: disable=unsupported-assignment-operation + + yield config + + @contextlib.contextmanager + def _migration_context(self, _connection: Optional[Connection] = None) -> MigrationContext: + """Context manager to return an instance of an Alembic migration context. + + This migration context will have been configured with the current database connection, which allows this context + to be used to inspect the contents of the database, such as the current revision. + """ + with self._alembic_connect(_connection) as config: + script = ScriptDirectory.from_config(config) + with EnvironmentContext(config, script) as context: + context.configure(context.config.attributes['connection']) + yield context.get_context() + + # the following are used for migration tests + + @contextlib.contextmanager + def session(self) -> Iterator[Session]: + """Context manager to return a session for the database.""" + with self._connection_context() as connection: + session = Session(connection.engine, future=True) + try: + yield session + except Exception: + session.rollback() + raise + finally: + session.close() + + def get_current_table(self, table_name: str) -> Table: + """Return a table instantiated at the correct migration. + + Note that this is obtained by inspecting the database and not by looking into the models file. + So, special methods possibly defined in the models files/classes are not present. 
+ """ + with self._connection_context() as connection: + base = automap_base() + base.prepare(autoload_with=connection.engine) + return getattr(base.classes, table_name) diff --git a/aiida/backends/sqlalchemy/models/authinfo.py b/aiida/backends/sqlalchemy/models/authinfo.py index b8c7944235..a6e454b89a 100644 --- a/aiida/backends/sqlalchemy/models/authinfo.py +++ b/aiida/backends/sqlalchemy/models/authinfo.py @@ -9,12 +9,10 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Module to manage authentification information for the SQLA backend.""" - from sqlalchemy import ForeignKey from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import relationship from sqlalchemy.schema import Column, UniqueConstraint -from sqlalchemy.sql.schema import Index from sqlalchemy.types import Boolean, Integer from .base import Base @@ -30,34 +28,26 @@ class DbAuthInfo(Base): __tablename__ = 'db_dbauthinfo' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - aiidauser_id = Column( Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), nullable=False, + index=True ) dbcomputer_id = Column( Integer, ForeignKey('db_dbcomputer.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), - nullable=False + nullable=False, + index=True ) - - aiidauser = relationship('DbUser', backref='authinfos') - dbcomputer = relationship('DbComputer', backref='authinfos') - _metadata = Column('metadata', JSONB, default=dict, nullable=False) auth_params = Column(JSONB, default=dict, nullable=False) - enabled = Column(Boolean, default=True, nullable=False) - __table_args__ = ( - # constraint/index names mirror django's auto-generated ones - UniqueConstraint( - 'aiidauser_id', 'dbcomputer_id', name='db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq' - ), - Index('db_dbauthinfo_aiidauser_id_0684fdfb', aiidauser_id), - Index('db_dbauthinfo_dbcomputer_id_424f7ac4', dbcomputer_id), - ) + aiidauser = relationship('DbUser', backref='authinfos') + dbcomputer = relationship('DbComputer', backref='authinfos') + + __table_args__ = (UniqueConstraint('aiidauser_id', 'dbcomputer_id'),) def __init__(self, *args, **kwargs): self._metadata = {} diff --git a/aiida/backends/sqlalchemy/models/base.py b/aiida/backends/sqlalchemy/models/base.py index dd7f6ab9ad..1c342e765f 100644 --- a/aiida/backends/sqlalchemy/models/base.py +++ b/aiida/backends/sqlalchemy/models/base.py @@ -9,86 +9,40 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Base SQLAlchemy models.""" - -from sqlalchemy import orm +from sqlalchemy import MetaData from sqlalchemy.orm import declarative_base -from sqlalchemy.orm.exc import UnmappedClassError - -import aiida.backends.sqlalchemy -from aiida.backends.sqlalchemy import get_scoped_session -from aiida.common.exceptions import InvalidOperation - -# Taken from -# https://github.com/mitsuhiko/flask-sqlalchemy/blob/master/flask_sqlalchemy/__init__.py#L491 - - -class _QueryProperty: - """Query property.""" - - def __init__(self, query_class=orm.Query): - self.query_class = query_class - - def __get__(self, obj, _type): - """Get property of a query.""" - try: - mapper = orm.class_mapper(_type) - if mapper: - return self.query_class(mapper, session=aiida.backends.sqlalchemy.get_scoped_session()) - return None - except UnmappedClassError: - return None - - -class _SessionProperty: - """Session 
Property""" - - def __get__(self, obj, _type): - if not aiida.backends.sqlalchemy.get_scoped_session(): - raise InvalidOperation('You need to call load_dbenv before accessing the session of SQLALchemy.') - return aiida.backends.sqlalchemy.get_scoped_session() - - -class _AiidaQuery(orm.Query): - """AiiDA query.""" - - def __iter__(self): - """Iterator.""" - from aiida.orm.implementation.sqlalchemy import convert # pylint: disable=cyclic-import - - iterator = super().__iter__() - for result in iterator: - # Allow the use of with_entities - if issubclass(type(result), Model): - yield convert.get_backend_entity(result, None) - else: - yield result class Model: - """Query model.""" - query = _QueryProperty() - - session = _SessionProperty() - - def save(self, commit=True): - """Emulate the behavior of Django's save() method - - :param commit: whether to do a commit or just add to the session - :return: the SQLAlchemy instance""" - sess = get_scoped_session() - sess.add(self) - if commit: - sess.commit() - return self - - def delete(self, commit=True): - """Emulate the behavior of Django's delete() method - - :param commit: whether to do a commit or just remover from the session""" - sess = get_scoped_session() - sess.delete(self) - if commit: - sess.commit() - - -Base = declarative_base(cls=Model, name='Model') # pylint: disable=invalid-name + """Base ORM model.""" + + +# see https://alembic.sqlalchemy.org/en/latest/naming.html +naming_convention = ( + ('pk', '%(table_name)s_pkey'), # this is identical to the default PSQL convention + ('ix', 'ix_%(table_name)s_%(column_0_N_label)s'), + # note, indexes using varchar_pattern_ops should be named: 'ix_pat_%(table_name)s_%(column_0_N_label)s' + ('uq', 'uq_%(table_name)s_%(column_0_N_name)s'), + ('ck', 'ck_%(table_name)s_%(constraint_name)s'), + ('fk', 'fk_%(table_name)s_%(column_0_N_name)s_%(referred_table_name)s'), + # note, ideally we may also append with '_%(referred_column_0_N_name)s', but this causes ORM construction errors: + # https://github.com/sqlalchemy/sqlalchemy/issues/5350 +) + +Base = declarative_base(cls=Model, name='Model', metadata=MetaData(naming_convention=dict(naming_convention))) # pylint: disable=invalid-name + + +def get_orm_metadata() -> MetaData: + """Return the populated metadata object.""" + # we must load all models, to populate the ORM metadata + from aiida.backends.sqlalchemy.models import ( # pylint: disable=unused-import + authinfo, + comment, + computer, + group, + log, + node, + settings, + user, + ) + return Base.metadata diff --git a/aiida/backends/sqlalchemy/models/comment.py b/aiida/backends/sqlalchemy/models/comment.py index fe555c5315..7d320889ee 100644 --- a/aiida/backends/sqlalchemy/models/comment.py +++ b/aiida/backends/sqlalchemy/models/comment.py @@ -9,12 +9,9 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Module to manage comments for the SQLA backend.""" - -from sqlalchemy import ForeignKey +from sqlalchemy import Column, ForeignKey from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import relationship -from sqlalchemy.schema import Column -from sqlalchemy.sql.schema import Index, UniqueConstraint from sqlalchemy.types import DateTime, Integer, Text from aiida.backends.sqlalchemy.models.base import Base @@ -28,30 +25,26 @@ class DbComment(Base): __tablename__ = 'db_dbcomment' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - - uuid = Column(UUID(as_uuid=True), default=get_new_uuid, 
nullable=False) + uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False, unique=True) dbnode_id = Column( - Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), nullable=False + Integer, + ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True ) - ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False) mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now, nullable=False) - user_id = Column( - Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), nullable=False + Integer, + ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True ) content = Column(Text, default='', nullable=False) dbnode = relationship('DbNode', backref='dbcomments') user = relationship('DbUser') - __table_args__ = ( - # index/constraint names mirror django's auto-generated ones - UniqueConstraint(uuid, name='db_dbcomment_uuid_49bac08c_uniq'), - Index('db_dbcomment_dbnode_id_3b812b6b', dbnode_id), - Index('db_dbcomment_user_id_8ed5e360', user_id), - ) - def __str__(self): return 'DbComment for [{} {}] on {}'.format( self.dbnode.get_simple_name(), self.dbnode.id, diff --git a/aiida/backends/sqlalchemy/models/computer.py b/aiida/backends/sqlalchemy/models/computer.py index 6a2d65eba0..90b2dc7da4 100644 --- a/aiida/backends/sqlalchemy/models/computer.py +++ b/aiida/backends/sqlalchemy/models/computer.py @@ -11,7 +11,7 @@ """Module to manage computers for the SQLA backend.""" from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.schema import Column -from sqlalchemy.sql.schema import Index, UniqueConstraint +from sqlalchemy.sql.schema import Index from sqlalchemy.types import Integer, String, Text from aiida.backends.sqlalchemy.models.base import Base @@ -32,8 +32,8 @@ class DbComputer(Base): __tablename__ = 'db_dbcomputer' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False) - label = Column(String(255), nullable=False) + uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False, unique=True) + label = Column(String(255), nullable=False, unique=True) hostname = Column(String(255), default='', nullable=False) description = Column(Text, default='', nullable=False) scheduler_type = Column(String(255), default='', nullable=False) @@ -41,11 +41,8 @@ class DbComputer(Base): _metadata = Column('metadata', JSONB, default=dict, nullable=False) __table_args__ = ( - # index names mirror django's auto-generated ones - UniqueConstraint(uuid, name='db_dbcomputer_uuid_f35defa6_uniq'), - UniqueConstraint(label, name='db_dbcomputer_label_bc480bab_uniq'), Index( - 'db_dbcomputer_label_bc480bab_like', + 'ix_pat_db_dbcomputer_label', label, postgresql_using='btree', postgresql_ops={'label': 'varchar_pattern_ops'} diff --git a/aiida/backends/sqlalchemy/models/group.py b/aiida/backends/sqlalchemy/models/group.py index 8c50d49744..dacf12d004 100644 --- a/aiida/backends/sqlalchemy/models/group.py +++ b/aiida/backends/sqlalchemy/models/group.py @@ -9,11 +9,9 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Module to manage computers for the SQLA backend.""" - -from sqlalchemy import ForeignKey from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.orm import 
backref, relationship -from sqlalchemy.schema import Column, Index, UniqueConstraint +from sqlalchemy.schema import Column, ForeignKey, Index, UniqueConstraint from sqlalchemy.types import DateTime, Integer, String, Text from aiida.common import timezone @@ -27,15 +25,15 @@ class DbGroupNode(Base): __tablename__ = 'db_dbgroup_dbnodes' id = Column(Integer, primary_key=True) - dbnode_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False) - dbgroup_id = Column(Integer, ForeignKey('db_dbgroup.id', deferrable=True, initially='DEFERRED'), nullable=False) - - __table_args__ = ( - UniqueConstraint('dbgroup_id', 'dbnode_id', name='db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq'), - Index('db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d', dbgroup_id), - Index('db_dbgroup_dbnodes_dbnode_id_118b9439', dbnode_id), + dbnode_id = Column( + Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True + ) + dbgroup_id = Column( + Integer, ForeignKey('db_dbgroup.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True ) + __table_args__ = (UniqueConstraint('dbgroup_id', 'dbnode_id'),) + table_groups_nodes = DbGroupNode.__table__ @@ -52,41 +50,29 @@ class DbGroup(Base): __tablename__ = 'db_dbgroup' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - - uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False) - label = Column(String(255), nullable=False) - - type_string = Column(String(255), default='', nullable=False) - + uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False, unique=True) + label = Column(String(255), nullable=False, index=True) + type_string = Column(String(255), default='', nullable=False, index=True) time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) description = Column(Text, default='', nullable=False) - extras = Column(JSONB, default=dict, nullable=False) - user_id = Column( Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), nullable=False, + index=True ) - user = relationship('DbUser', backref=backref('dbgroups', cascade='merge')) + user = relationship('DbUser', backref=backref('dbgroups', cascade='merge')) dbnodes = relationship('DbNode', secondary=table_groups_nodes, backref='dbgroups', lazy='dynamic') __table_args__ = ( - # index/constrinat names mirror django's auto-generated ones - UniqueConstraint('label', 'type_string', name='db_dbgroup_name_type_12656f33_uniq'), - UniqueConstraint(uuid, name='db_dbgroup_uuid_af896177_uniq'), - Index('db_dbgroup_name_66c75272', label), - Index('db_dbgroup_type_23b2a748', type_string), - Index('db_dbgroup_user_id_100f8a51', user_id), + UniqueConstraint('label', 'type_string'), Index( - 'db_dbgroup_name_66c75272_like', - label, - postgresql_using='btree', - postgresql_ops={'label': 'varchar_pattern_ops'} + 'ix_pat_db_dbgroup_label', label, postgresql_using='btree', postgresql_ops={'label': 'varchar_pattern_ops'} ), Index( - 'db_dbgroup_type_23b2a748_like', + 'ix_pat_db_dbgroup_type_string', type_string, postgresql_using='btree', postgresql_ops={'type_string': 'varchar_pattern_ops'} diff --git a/aiida/backends/sqlalchemy/models/log.py b/aiida/backends/sqlalchemy/models/log.py index e60b52fa99..cc37782c0b 100644 --- a/aiida/backends/sqlalchemy/models/log.py +++ b/aiida/backends/sqlalchemy/models/log.py @@ -9,12 +9,10 @@ ########################################################################### # pylint: 
disable=import-error,no-name-in-module """Module to manage logs for the SQLA backend.""" - -from sqlalchemy import ForeignKey from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.orm import backref, relationship from sqlalchemy.schema import Column -from sqlalchemy.sql.schema import Index, UniqueConstraint +from sqlalchemy.sql.schema import ForeignKey, Index from sqlalchemy.types import DateTime, Integer, String, Text from aiida.backends.sqlalchemy.models.base import Base @@ -27,12 +25,15 @@ class DbLog(Base): __tablename__ = 'db_dblog' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False) + uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False, unique=True) time = Column(DateTime(timezone=True), default=timezone.now, nullable=False) - loggername = Column(String(255), nullable=False, doc='What process recorded the message') - levelname = Column(String(50), nullable=False, doc='How critical the message is') + loggername = Column(String(255), nullable=False, index=True, doc='What process recorded the message') + levelname = Column(String(50), nullable=False, index=True, doc='How critical the message is') dbnode_id = Column( - Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), nullable=False + Integer, + ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED', ondelete='CASCADE'), + nullable=False, + index=True ) message = Column(Text(), default='', nullable=False) _metadata = Column('metadata', JSONB, default=dict, nullable=False) @@ -40,19 +41,14 @@ class DbLog(Base): dbnode = relationship('DbNode', backref=backref('dblogs', passive_deletes='all', cascade='merge')) __table_args__ = ( - # index/constrain names mirror django's auto-generated ones - UniqueConstraint(uuid, name='db_dblog_uuid_9cf77df3_uniq'), - Index('db_dblog_loggername_00b5ba16', loggername), - Index('db_dblog_levelname_ad5dc346', levelname), - Index('db_dblog_dbnode_id_da34b732', dbnode_id), Index( - 'db_dblog_loggername_00b5ba16_like', + 'ix_pat_db_dblog_loggername', loggername, postgresql_using='btree', postgresql_ops={'loggername': 'varchar_pattern_ops'} ), Index( - 'db_dblog_levelname_ad5dc346_like', + 'ix_pat_db_dblog_levelname', levelname, postgresql_using='btree', postgresql_ops={'levelname': 'varchar_pattern_ops'} diff --git a/aiida/backends/sqlalchemy/models/node.py b/aiida/backends/sqlalchemy/models/node.py index 410acefa52..5afe6817ab 100644 --- a/aiida/backends/sqlalchemy/models/node.py +++ b/aiida/backends/sqlalchemy/models/node.py @@ -9,15 +9,10 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Module to manage nodes for the SQLA backend.""" - -from sqlalchemy import ForeignKey, text -# Specific to PGSQL. 
If needed to be agnostic -# http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type -# Or maybe rely on sqlalchemy-utils UUID type from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.orm import backref, relationship from sqlalchemy.schema import Column -from sqlalchemy.sql.schema import Index, UniqueConstraint +from sqlalchemy.sql.schema import ForeignKey, Index from sqlalchemy.types import DateTime, Integer, String, Text from aiida.backends.sqlalchemy.models.base import Base @@ -44,26 +39,27 @@ class DbNode(Base): __tablename__ = 'db_dbnode' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False) - node_type = Column(String(255), default='', nullable=False) - process_type = Column(String(255)) - label = Column(String(255), nullable=False, default='') + uuid = Column(UUID(as_uuid=True), default=get_new_uuid, nullable=False, unique=True) + node_type = Column(String(255), default='', nullable=False, index=True) + process_type = Column(String(255), index=True) + label = Column(String(255), nullable=False, default='', index=True) description = Column(Text(), nullable=False, default='') - ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False) - mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now, nullable=False) + ctime = Column(DateTime(timezone=True), default=timezone.now, nullable=False, index=True) + mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now, nullable=False, index=True) attributes = Column(JSONB) extras = Column(JSONB) - repository_metadata = Column(JSONB, nullable=True, default=dict) - + repository_metadata = Column(JSONB, nullable=False, default=dict) dbcomputer_id = Column( Integer, ForeignKey('db_dbcomputer.id', deferrable=True, initially='DEFERRED', ondelete='RESTRICT'), - nullable=True + nullable=True, + index=True ) - - # This should have the same ondelete behaviour as db_computer_id, right? user_id = Column( - Integer, ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='restrict'), nullable=False + Integer, + ForeignKey('db_dbuser.id', deferrable=True, initially='DEFERRED', ondelete='RESTRICT'), + nullable=False, + index=True ) # pylint: disable=fixme @@ -74,8 +70,6 @@ class DbNode(Base): # we would remove all link with x as an output. 
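For readers unfamiliar with the `ix_pat_*` declarations used throughout these model diffs, here is a standalone sketch of a `varchar_pattern_ops` index (the table and names are illustrative, not part of the patch); such btree pattern-ops indexes speed up `LIKE 'prefix%'` queries on PostgreSQL:

```python
from sqlalchemy import Column, Index, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Example(Base):  # illustrative model, not from aiida-core
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    label = Column(String(255), nullable=False, index=True)
    __table_args__ = (
        # btree index with varchar_pattern_ops, mirroring the `ix_pat_*` naming scheme
        Index(
            'ix_pat_example_label',
            label,
            postgresql_using='btree',
            postgresql_ops={'label': 'varchar_pattern_ops'},
        ),
    )
```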
dbcomputer = relationship('DbComputer', backref=backref('dbnodes', passive_deletes='all', cascade='merge')) - - # User user = relationship('DbUser', backref=backref( 'dbnodes', passive_deletes='all', @@ -94,29 +88,17 @@ class DbNode(Base): ) __table_args__ = ( - # index/constraint names mirror django's auto-generated ones - UniqueConstraint(uuid, name='db_dbnode_uuid_62e0bf98_uniq'), - Index('db_dbnode_label_6469539e', label), - Index('db_dbnode_type_a8ce9753', node_type), - Index('db_dbnode_process_type_df7298d0', process_type), - Index('db_dbnode_ctime_71626ef5', ctime), - Index('db_dbnode_mtime_0554ea3d', mtime), - Index('db_dbnode_dbcomputer_id_315372a3', dbcomputer_id), - Index('db_dbnode_user_id_12e7aeaf', user_id), Index( - 'db_dbnode_label_6469539e_like', - label, - postgresql_using='btree', - postgresql_ops={'label': 'varchar_pattern_ops'} + 'ix_pat_db_dbnode_label', label, postgresql_using='btree', postgresql_ops={'label': 'varchar_pattern_ops'} ), Index( - 'db_dbnode_type_a8ce9753_like', + 'ix_pat_db_dbnode_node_type', node_type, postgresql_using='btree', postgresql_ops={'node_type': 'varchar_pattern_ops'} ), Index( - 'db_dbnode_process_type_df7298d0_like', + 'ix_pat_db_dbnode_process_type', process_type, postgresql_using='btree', postgresql_ops={'process_type': 'varchar_pattern_ops'} @@ -200,17 +182,22 @@ class DbLink(Base): __tablename__ = 'db_dblink' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - input_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False) + input_id = Column( + Integer, ForeignKey('db_dbnode.id', deferrable=True, initially='DEFERRED'), nullable=False, index=True + ) output_id = Column( - Integer, ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), nullable=False + Integer, + ForeignKey('db_dbnode.id', ondelete='CASCADE', deferrable=True, initially='DEFERRED'), + nullable=False, + index=True ) # https://docs.sqlalchemy.org/en/14/errors.html#relationship-x-will-copy-column-q-to-column-p-which-conflicts-with-relationship-s-y input = relationship('DbNode', primaryjoin='DbLink.input_id == DbNode.id', overlaps='inputs_q,outputs_q') output = relationship('DbNode', primaryjoin='DbLink.output_id == DbNode.id', overlaps='inputs_q,outputs_q') - label = Column(String(255), nullable=False) - type = Column(String(255), nullable=False) + label = Column(String(255), nullable=False, index=True) + type = Column(String(255), nullable=False, index=True) # A calculation can have both a 'return' and a 'create' link to # a single data output node, which would violate the unique constraint @@ -221,23 +208,10 @@ class DbLink(Base): # I cannot add twice the same link # I want unique labels among all inputs of a node # UniqueConstraint('output_id', 'label'), - # index names mirror django's auto-generated ones - Index('db_dblink_input_id_9245bd73', input_id), - Index('db_dblink_output_id_c0167528', output_id), - Index('db_dblink_label_f1343cfb', label), - Index('db_dblink_type_229f212b', type), - Index( - 'db_dblink_label_f1343cfb_like', - label, - postgresql_using='btree', - postgresql_ops={'label': 'varchar_pattern_ops'} - ), Index( - 'db_dblink_type_229f212b_like', - type, - postgresql_using='btree', - postgresql_ops={'type': 'varchar_pattern_ops'} + 'ix_pat_db_dblink_label', label, postgresql_using='btree', postgresql_ops={'label': 'varchar_pattern_ops'} ), + Index('ix_pat_db_dblink_type', type, postgresql_using='btree', postgresql_ops={'type': 'varchar_pattern_ops'}), ) def 
__str__(self): diff --git a/aiida/backends/sqlalchemy/models/settings.py b/aiida/backends/sqlalchemy/models/settings.py index 349d69f532..3675b20e21 100644 --- a/aiida/backends/sqlalchemy/models/settings.py +++ b/aiida/backends/sqlalchemy/models/settings.py @@ -9,15 +9,11 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Module to manage node settings for the SQLA backend.""" -from pytz import UTC from sqlalchemy import Column from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.orm.attributes import flag_modified -from sqlalchemy.schema import UniqueConstraint from sqlalchemy.sql.schema import Index from sqlalchemy.types import DateTime, Integer, String, Text -from aiida.backends import sqlalchemy as sa from aiida.backends.sqlalchemy.models.base import Base from aiida.common import timezone @@ -27,8 +23,7 @@ class DbSetting(Base): __tablename__ = 'db_dbsetting' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - - key = Column(String(1024), nullable=False) + key = Column(String(1024), nullable=False, unique=True) val = Column(JSONB, default={}) # I also add a description field for the variables @@ -36,51 +31,10 @@ class DbSetting(Base): time = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now, nullable=False) __table_args__ = ( - # index/constraint names mirror django's auto-generated ones - UniqueConstraint(key, name='db_dbsetting_key_1b84beb4_uniq'), Index( - 'db_dbsetting_key_1b84beb4_like', - key, - postgresql_using='btree', - postgresql_ops={'key': 'varchar_pattern_ops'} + 'ix_pat_db_dbsetting_key', 'key', postgresql_using='btree', postgresql_ops={'key': 'varchar_pattern_ops'} ), ) def __str__(self): - return f"'{self.key}'={self.getvalue()}" - - @classmethod - def set_value(cls, key, value, other_attribs=None, stop_if_existing=False): - """Set a setting value.""" - other_attribs = other_attribs if other_attribs is not None else {} - setting = sa.get_scoped_session().query(DbSetting).filter_by(key=key).first() - if setting is not None: - if stop_if_existing: - return - else: - setting = cls() - - setting.key = key - setting.val = value - flag_modified(setting, 'val') - setting.time = timezone.datetime.now(tz=UTC) - if 'description' in other_attribs.keys(): - setting.description = other_attribs['description'] - setting.save() - - def getvalue(self): - """This can be called on a given row and will get the corresponding value.""" - return self.val - - def get_description(self): - """This can be called on a given row and will get the corresponding description.""" - return self.description - - @classmethod - def del_value(cls, key): - """Delete a setting value.""" - setting = sa.get_scoped_session().query(DbSetting).filter(key=key) - setting.val = None - setting.time = timezone.datetime.utcnow() - flag_modified(setting, 'val') - setting.save() + return f"'{self.key}'={self.val}" diff --git a/aiida/backends/sqlalchemy/models/user.py b/aiida/backends/sqlalchemy/models/user.py index f16037d2c2..a3ff6c6e94 100644 --- a/aiida/backends/sqlalchemy/models/user.py +++ b/aiida/backends/sqlalchemy/models/user.py @@ -11,7 +11,7 @@ """Module to manage users for the SQLA backend.""" from sqlalchemy.schema import Column -from sqlalchemy.sql.schema import Index, UniqueConstraint +from sqlalchemy.sql.schema import Index from sqlalchemy.types import Integer, String from aiida.backends.sqlalchemy.models.base import Base @@ -25,19 +25,14 @@ class DbUser(Base): 
__tablename__ = 'db_dbuser' id = Column(Integer, primary_key=True) # pylint: disable=invalid-name - email = Column(String(254), nullable=False) + email = Column(String(254), nullable=False, unique=True) first_name = Column(String(254), default='', nullable=False) last_name = Column(String(254), default='', nullable=False) institution = Column(String(254), default='', nullable=False) __table_args__ = ( - # index/constraint names mirror django's auto-generated ones - UniqueConstraint(email, name='db_dbuser_email_30150b7e_uniq'), Index( - 'db_dbuser_email_30150b7e_like', - email, - postgresql_using='btree', - postgresql_ops={'email': 'varchar_pattern_ops'} + 'ix_pat_db_dbuser_email', email, postgresql_using='btree', postgresql_ops={'email': 'varchar_pattern_ops'} ), ) diff --git a/aiida/backends/sqlalchemy/testbase.py b/aiida/backends/sqlalchemy/testbase.py deleted file mode 100644 index 68b6d0f906..0000000000 --- a/aiida/backends/sqlalchemy/testbase.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module -""" This module contains the codebase for the setUpClass and tearDown methods used -internally by the AiidaTestCase. This inherits only from 'object' to avoid -that it is picked up by the automatic discovery of tests -(It shouldn't, as it risks to destroy the DB if there are not the checks -in place, and these are implemented in the AiidaTestCase. 
""" - -from aiida.backends.testimplbase import AiidaTestImplementation - - -class SqlAlchemyTests(AiidaTestImplementation): - """Base class to test SQLA-related functionalities.""" - connection = None - - def clean_db(self): - from sqlalchemy.sql import table - - # pylint: disable=invalid-name - DbGroupNodes = table('db_dbgroup_dbnodes') - DbGroup = table('db_dbgroup') - DbLink = table('db_dblink') - DbNode = table('db_dbnode') - DbLog = table('db_dblog') - DbAuthInfo = table('db_dbauthinfo') - DbUser = table('db_dbuser') - DbComputer = table('db_dbcomputer') - - with self.backend.transaction() as session: - session.execute(DbGroupNodes.delete()) - session.execute(DbGroup.delete()) - session.execute(DbLog.delete()) - session.execute(DbLink.delete()) - session.execute(DbNode.delete()) - session.execute(DbAuthInfo.delete()) - session.execute(DbComputer.delete()) - session.execute(DbUser.delete()) - session.commit() diff --git a/aiida/backends/sqlalchemy/utils.py b/aiida/backends/sqlalchemy/utils.py index 780df99bf3..187f19691b 100644 --- a/aiida/backends/sqlalchemy/utils.py +++ b/aiida/backends/sqlalchemy/utils.py @@ -9,6 +9,59 @@ ########################################################################### # pylint: disable=import-error,no-name-in-module """Utility functions specific to the SqlAlchemy backend.""" +from typing import TypedDict + + +class PsqlConfig(TypedDict, total=False): + """Configuration to connect to a PostgreSQL database.""" + database_hostname: str + database_port: int + database_username: str + database_password: str + database_name: str + + engine_kwargs: dict + """keyword argument that will be passed on to the SQLAlchemy engine.""" + + +def create_sqlalchemy_engine(config: PsqlConfig): + """Create SQLAlchemy engine (to be used for QueryBuilder queries) + + :param kwargs: keyword arguments that will be passed on to `sqlalchemy.create_engine`. + See https://docs.sqlalchemy.org/en/13/core/engines.html?highlight=create_engine#sqlalchemy.create_engine for + more info. + """ + from sqlalchemy import create_engine + + from aiida.common import json + + # The hostname may be `None`, which is a valid value in the case of peer authentication for example. 
diff --git a/aiida/backends/testbase.py b/aiida/backends/testbase.py index 0b5628e77f..05aa9b7d8e 100644 --- a/aiida/backends/testbase.py +++ b/aiida/backends/testbase.py @@ -8,62 +8,32 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Basic test classes.""" -import os import traceback +from typing import Optional import unittest from aiida import orm -from aiida.common.exceptions import ConfigurationError, InternalError, TestsNotAllowedError +from aiida.common.exceptions import TestsNotAllowedError from aiida.common.lang import classproperty -from aiida.manage import configuration -from aiida.manage.manager import get_manager, reset_manager +from aiida.manage import configuration, get_manager +from aiida.orm.implementation import Backend TEST_KEYWORD = 'test_' def check_if_tests_can_run(): """Verify that the currently loaded profile is a test profile, otherwise raise `TestsNotAllowedError`.""" - profile = configuration.PROFILE + profile = configuration.get_profile() + if not profile: + raise TestsNotAllowedError('No profile is loaded.') if not profile.is_test_profile: raise TestsNotAllowedError(f'currently loaded profile {profile.name} is not a valid test profile') class AiidaTestCase(unittest.TestCase): - """This is the base class for AiiDA tests, independent of the backend.
- - Internally it loads the AiidaTestImplementation subclass according to the current backend.""" - _computer = None # type: aiida.orm.Computer - _user = None # type: aiida.orm.User + """This is the base class for AiiDA tests, independent of the backend.""" _class_was_setup = False - __backend_instance = None - backend = None # type: aiida.orm.implementation.Backend - - @classmethod - def get_backend_class(cls): - """Get backend class.""" - from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA - from aiida.backends.testimplbase import AiidaTestImplementation - from aiida.manage.configuration import PROFILE - - # Freeze the __impl_class after the first run - if not hasattr(cls, '__impl_class'): - if PROFILE.storage_backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy.testbase import SqlAlchemyTests - cls.__impl_class = SqlAlchemyTests - elif PROFILE.storage_backend == BACKEND_DJANGO: - from aiida.backends.djsite.db.testbase import DjangoTests - cls.__impl_class = DjangoTests - else: - raise ConfigurationError('Unknown backend type') - - # Check that it is of the right class - if not issubclass(cls.__impl_class, AiidaTestImplementation): - raise InternalError( - 'The AiiDA test implementation is not of type ' - '{}, that is not a subclass of AiidaTestImplementation'.format(cls.__impl_class.__name__) - ) - - return cls.__impl_class + backend: Optional[Backend] = None @classmethod def setUpClass(cls): @@ -74,43 +44,36 @@ def setUpClass(cls): check_if_tests_can_run() # Force the loading of the backend which will load the required database environment - cls.backend = get_manager().get_backend() - cls.__backend_instance = cls.get_backend_class()() cls._class_was_setup = True - - cls.refurbish_db() - cls.initialise_repository() + cls.clean_db() + cls.backend = get_manager().get_profile_storage() @classmethod def tearDownClass(cls): - """Tear down test class. - - Note: Also cleans file repository. - """ + """Tear down test class, by clearing all backend storage.""" # Double check for double security to avoid to run the tearDown # if this is not a test profile check_if_tests_can_run() - if orm.autogroup.CURRENT_AUTOGROUP is not None: - orm.autogroup.CURRENT_AUTOGROUP.clear_group_cache() cls.clean_db() - cls.clean_repository() def tearDown(self): - reset_manager() - - ### Database/repository-related methods - - @classmethod - def insert_data(cls): - """ - This method setups the database (by creating a default user) and - inserts default data into the database (which is for the moment a - default computer). 
- """ - orm.User.objects.reset() # clear Aiida's cache of the default user - # populate user cache of test clases - cls.user # pylint: disable=pointless-statement + manager = get_manager() + # this should really call reset profile, but that also resets the storage backend + # and causes issues for some existing tests that set class level entities + # manager.reset_profile() + # pylint: disable=protected-access + if manager._communicator is not None: + manager._communicator.close() + if manager._runner is not None: + manager._runner.stop() + manager._communicator = None + manager._runner = None + manager._daemon_client = None + manager._process_controller = None + manager._persister = None + + ### storage methods @classmethod def clean_db(cls): @@ -128,86 +91,38 @@ def clean_db(cls): if not cls._class_was_setup: raise InvalidOperation('You cannot call clean_db before running the setUpClass') - cls.__backend_instance.clean_db() - cls._computer = None - cls._user = None - - if orm.autogroup.CURRENT_AUTOGROUP is not None: - orm.autogroup.CURRENT_AUTOGROUP.clear_group_cache() - - reset_manager() - - @classmethod - def initialise_repository(cls): - """Initialise the repository""" - from aiida.manage.configuration import get_profile - profile = get_profile() - repository = cls.backend.get_repository() - repository.initialise(clear=True, **profile.defaults['repository']) + manager = get_manager() + manager.get_profile_storage()._clear(recreate_user=True) # pylint: disable=protected-access + manager.reset_profile() @classmethod def refurbish_db(cls): - """Clean up database and repopulate with initial data. - - Combines clean_db and insert_data. - """ + """Clean up database and repopulate with initial data.""" cls.clean_db() - cls.insert_data() - - @classmethod - def clean_repository(cls): - """ - Cleans up file repository. 
- """ - import shutil - - from aiida.common.exceptions import InvalidOperation - from aiida.manage.configuration import get_profile - - dirpath_repository = get_profile().repository_path - - base_repo_path = os.path.basename(os.path.normpath(dirpath_repository)) - if TEST_KEYWORD not in base_repo_path: - raise InvalidOperation( - 'Warning: The repository folder {} does not ' - 'seem to belong to a test profile and will therefore not be deleted.\n' - 'Full repository path: ' - '{}'.format(base_repo_path, dirpath_repository) - ) - - # Clean the test repository - shutil.rmtree(dirpath_repository, ignore_errors=True) - os.makedirs(dirpath_repository) - cls.initialise_repository() @classproperty - def computer(cls): # pylint: disable=no-self-argument + def computer(cls) -> orm.Computer: # pylint: disable=no-self-argument """Get the default computer for this test :return: the test computer - :rtype: :class:`aiida.orm.Computer`""" - if cls._computer is None: - created, computer = orm.Computer.objects.get_or_create( - label='localhost', - hostname='localhost', - transport_type='core.local', - scheduler_type='core.direct', - workdir='/tmp/aiida', - ) - if created: - computer.store() - cls._computer = computer - - return cls._computer + """ + created, computer = orm.Computer.objects.get_or_create( + label='localhost', + hostname='localhost', + transport_type='core.local', + scheduler_type='core.direct', + workdir='/tmp/aiida', + ) + if created: + computer.store() + return computer @classproperty - def user(cls): # pylint: disable=no-self-argument - if cls._user is None: - cls._user = get_default_user() - return cls._user + def user(cls) -> orm.User: # pylint: disable=no-self-argument + return get_default_user() @classproperty - def user_email(cls): # pylint: disable=no-self-argument + def user_email(cls) -> str: # pylint: disable=no-self-argument return cls.user.email # pylint: disable=no-member ### Usability methods @@ -247,8 +162,7 @@ def get_default_user(**kwargs): :param kwargs: Additional information to use for new user, i.e. 'first_name', 'last_name' or 'institution'. :returns: the :py:class:`~aiida.orm.User` """ - from aiida.manage.configuration import get_config - email = get_config().current_profile.default_user_email + email = configuration.get_profile().default_user_email if kwargs.pop('email', None): raise ValueError('Do not specify the user email (must coincide with default user email of profile).') diff --git a/aiida/backends/testimplbase.py b/aiida/backends/testimplbase.py deleted file mode 100644 index 6390b74949..0000000000 --- a/aiida/backends/testimplbase.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Implementation-dependednt base tests""" -from abc import ABC, abstractmethod - - -class AiidaTestImplementation(ABC): - """Backend-specific test implementations.""" - _backend = None - - @property - def backend(self): - """Get the backend.""" - if self._backend is None: - from aiida.manage.manager import get_manager - self._backend = get_manager().get_backend() - - return self._backend - - @abstractmethod - def clean_db(self): - """This method fully cleans the DB.""" diff --git a/aiida/backends/utils.py b/aiida/backends/utils.py deleted file mode 100644 index 8da39f6f7b..0000000000 --- a/aiida/backends/utils.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Backend-agnostic utility functions""" -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from aiida.manage.configuration.profile import Profile - -AIIDA_ATTRIBUTE_SEP = '.' - - -def create_sqlalchemy_engine(profile: 'Profile', **kwargs): - """Create SQLAlchemy engine (to be used for QueryBuilder queries) - - :param kwargs: keyword arguments that will be passed on to `sqlalchemy.create_engine`. - See https://docs.sqlalchemy.org/en/13/core/engines.html?highlight=create_engine#sqlalchemy.create_engine for - more info. - """ - from sqlalchemy import create_engine - - from aiida.common import json - - # The hostname may be `None`, which is a valid value in the case of peer authentication for example. 
In this case - # it should be converted to an empty string, because otherwise the `None` will be converted to string literal "None" - hostname = profile.storage_config['database_hostname'] or '' - separator = ':' if profile.storage_config['database_port'] else '' - - engine_url = 'postgresql://{user}:{password}@{hostname}{separator}{port}/{name}'.format( - separator=separator, - user=profile.storage_config['database_username'], - password=profile.storage_config['database_password'], - hostname=hostname, - port=profile.storage_config['database_port'], - name=profile.storage_config['database_name'] - ) - return create_engine( - engine_url, json_serializer=json.dumps, json_deserializer=json.loads, future=True, encoding='utf-8', **kwargs - ) - - -def create_scoped_session_factory(engine, **kwargs): - """Create scoped SQLAlchemy session factory""" - from sqlalchemy.orm import scoped_session, sessionmaker - return scoped_session(sessionmaker(bind=engine, future=True, **kwargs)) diff --git a/aiida/cmdline/commands/cmd_daemon.py b/aiida/cmdline/commands/cmd_daemon.py index e0f9b6bfd4..604c04dc3a 100644 --- a/aiida/cmdline/commands/cmd_daemon.py +++ b/aiida/cmdline/commands/cmd_daemon.py @@ -26,7 +26,7 @@ get_daemon_status, print_client_response_status, ) -from aiida.manage.configuration import get_config +from aiida.manage import get_manager def validate_daemon_workers(ctx, param, value): # pylint: disable=unused-argument,invalid-name @@ -98,12 +98,13 @@ def status(all_profiles): """ from aiida.engine.daemon.client import get_daemon_client - config = get_config() + manager = get_manager() + config = manager.get_config() if all_profiles is True: profiles = [profile for profile in config.profiles if not profile.is_test_profile] else: - profiles = [config.current_profile] + profiles = [manager.get_profile()] daemons_running = [] for profile in profiles: @@ -178,12 +179,13 @@ def stop(no_wait, all_profiles): """ from aiida.engine.daemon.client import get_daemon_client - config = get_config() + manager = get_manager() + config = manager.get_config() if all_profiles is True: profiles = [profile for profile in config.profiles if not profile.is_test_profile] else: - profiles = [config.current_profile] + profiles = [manager.get_profile()] for profile in profiles: diff --git a/aiida/cmdline/commands/cmd_database.py b/aiida/cmdline/commands/cmd_database.py index 3fd4e1f725..91817e90e0 100644 --- a/aiida/cmdline/commands/cmd_database.py +++ b/aiida/cmdline/commands/cmd_database.py @@ -12,7 +12,6 @@ import click -from aiida.backends.general.migrations.duplicate_uuids import TABLES_UUID_DEDUPLICATION from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params import options from aiida.cmdline.utils import decorators @@ -29,7 +28,7 @@ def verdi_database(): @verdi_database.command('version') @decorators.deprecated_command( 'This command has been deprecated and no longer has any effect. It will be removed soon from the CLI (in v2.1).\n' - 'The same information is now available through `verdi status`.\n' + 'The same information is now available through `verdi storage version`.\n' ) def database_version(): """Show the version of the database. @@ -68,8 +67,8 @@ def verdi_database_integrity(): @click.option( '-t', '--table', - type=click.Choice(TABLES_UUID_DEDUPLICATION), default='db_dbnode', + type=click.Choice(('db_dbcomment', 'db_dbcomputer', 'db_dbgroup', 'db_dbnode')), help='The database table to operate on.' 
) @click.option( diff --git a/aiida/cmdline/commands/cmd_devel.py b/aiida/cmdline/commands/cmd_devel.py index 8b6d0d15df..1a7d0bd521 100644 --- a/aiida/cmdline/commands/cmd_devel.py +++ b/aiida/cmdline/commands/cmd_devel.py @@ -12,6 +12,7 @@ import click +from aiida import get_profile from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.utils import decorators, echo @@ -33,7 +34,7 @@ def devel_check_load_time(): If either of these conditions are true, the command will raise a critical error """ - from aiida.manage.manager import get_manager + from aiida.manage import get_manager loaded_aiida_modules = [key for key in sys.modules if key.startswith('aiida.')] aiida_modules_str = '\n- '.join(sorted(loaded_aiida_modules)) @@ -41,7 +42,7 @@ def devel_check_load_time(): manager = get_manager() - if manager.backend_loaded: + if manager.profile_storage_loaded: echo.echo_critical('potential `verdi` speed problem: database backend is loaded.') allowed = ('aiida.backends', 'aiida.cmdline', 'aiida.common', 'aiida.manage', 'aiida.plugins', 'aiida.restapi') @@ -97,12 +98,15 @@ def devel_validate_plugins(): @verdi_devel.command('run-sql') @click.argument('sql', type=str) -@decorators.with_dbenv() def devel_run_sql(sql): - """Run a raw SQL command on the database.""" - from aiida.manage.manager import get_manager - manager = get_manager() - result = manager.get_backend().execute_raw(sql) + """Run a raw SQL command on the profile database (only available for 'psql_dos' storage).""" + from sqlalchemy import text + + from aiida.backends.sqlalchemy.utils import create_sqlalchemy_engine + assert get_profile().storage_backend == 'psql_dos' + with create_sqlalchemy_engine(get_profile().storage_config).connect() as connection: + result = connection.execute(text(sql)).fetchall() + if isinstance(result, (list, tuple)): for row in result: echo.echo(str(row)) diff --git a/aiida/cmdline/commands/cmd_process.py b/aiida/cmdline/commands/cmd_process.py index bfadd12590..8e42b661e3 100644 --- a/aiida/cmdline/commands/cmd_process.py +++ b/aiida/cmdline/commands/cmd_process.py @@ -17,7 +17,7 @@ from aiida.cmdline.utils import decorators, echo from aiida.cmdline.utils.query.calculation import CalculationQueryBuilder from aiida.common.log import LOG_LEVELS -from aiida.manage.manager import get_manager +from aiida.manage import get_manager @verdi.group('process') diff --git a/aiida/cmdline/commands/cmd_profile.py b/aiida/cmdline/commands/cmd_profile.py index 70594a4369..45645aefa0 100644 --- a/aiida/cmdline/commands/cmd_profile.py +++ b/aiida/cmdline/commands/cmd_profile.py @@ -9,7 +9,6 @@ ########################################################################### """`verdi profile` command.""" import click -import tabulate from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params import arguments, options @@ -46,6 +45,15 @@ def profile_list(): echo.echo_formatted_list(config.profiles, ['name'], sort=sort, highlight=highlight) +def _strip_private_keys(dct: dict): + """Remove private keys (starting `_`) from the dictionary.""" + return { + key: _strip_private_keys(value) if isinstance(value, dict) else value + for key, value in dct.items() + if not key.startswith('_') + } + + @verdi_profile.command('show') @arguments.PROFILE(default=defaults.get_default_profile) def profile_show(profile): @@ -55,8 +63,8 @@ def profile_show(profile): echo.echo_critical('no profile to show') echo.echo_report(f'Profile: {profile.name}') - data = sorted([(k.lower(), v) for k, v in profile.dictionary.items()]) - 
echo.echo(tabulate.tabulate(data)) + config = _strip_private_keys(profile.dictionary) + echo.echo_dictionary(config, fmt='yaml') @verdi_profile.command('setdefault') diff --git a/aiida/cmdline/commands/cmd_run.py b/aiida/cmdline/commands/cmd_run.py index 174bcd22be..343dc9a641 100644 --- a/aiida/cmdline/commands/cmd_run.py +++ b/aiida/cmdline/commands/cmd_run.py @@ -42,7 +42,7 @@ def validate_entry_point_strings(ctx, param, value): # pylint: disable=unused-a from aiida.orm import autogroup try: - autogroup.Autogroup.validate(value) + autogroup.AutogroupManager.validate(value) except (TypeError, ValueError) as exc: raise click.BadParameter(f'{str(exc)}: `{value}`') @@ -84,7 +84,7 @@ def run(scriptname, varargs, auto_group, auto_group_label_prefix, exclude, inclu # pylint: disable=too-many-arguments,exec-used """Execute scripts with preloaded AiiDA environment.""" from aiida.cmdline.utils.shell import DEFAULT_MODULES_LIST - from aiida.orm import autogroup + from aiida.manage import get_manager # Prepare the environment for the script to be run globals_dict = { @@ -100,15 +100,12 @@ def run(scriptname, varargs, auto_group, auto_group_label_prefix, exclude, inclu globals_dict[f'{alias}'] = getattr(__import__(app_mod, {}, {}, model_name), model_name) if auto_group: - aiida_verdilib_autogroup = autogroup.Autogroup() + storage_backend = get_manager().get_profile_storage() + storage_backend.autogroup.enable() # Set the ``group_label_prefix`` if defined, otherwise a default prefix will be used - if auto_group_label_prefix is not None: - aiida_verdilib_autogroup.set_group_label_prefix(auto_group_label_prefix) - aiida_verdilib_autogroup.set_exclude(exclude) - aiida_verdilib_autogroup.set_include(include) - - # Note: this is also set in the exec environment! This is the intended behavior - autogroup.CURRENT_AUTOGROUP = aiida_verdilib_autogroup + storage_backend.autogroup.set_group_label_prefix(auto_group_label_prefix) + storage_backend.autogroup.set_exclude(exclude) + storage_backend.autogroup.set_include(include) # Initialize the variable here, otherwise we get UnboundLocalError in the finally clause if it fails to open handle = None @@ -130,6 +127,7 @@ def run(scriptname, varargs, auto_group, auto_group_label_prefix, exclude, inclu # Re-raise the exception to have the error code properly returned at the end raise finally: - autogroup.current_autogroup = None + storage_backend = get_manager().get_profile_storage() + storage_backend.autogroup.disable() if handle: handle.close() diff --git a/aiida/cmdline/commands/cmd_setup.py b/aiida/cmdline/commands/cmd_setup.py index 774ac6ed0e..7795a7c616 100644 --- a/aiida/cmdline/commands/cmd_setup.py +++ b/aiida/cmdline/commands/cmd_setup.py @@ -15,8 +15,7 @@ from aiida.cmdline.params import options from aiida.cmdline.params.options.commands import setup as options_setup from aiida.cmdline.utils import echo -from aiida.manage.configuration import load_profile -from aiida.manage.manager import get_manager +from aiida.manage.configuration import Profile, load_profile @verdi.command('setup') @@ -42,11 +41,14 @@ @options_setup.SETUP_REPOSITORY_URI() @options.CONFIG_FILE() def setup( - non_interactive, profile, email, first_name, last_name, institution, db_engine, db_backend, db_host, db_port, - db_name, db_username, db_password, broker_protocol, broker_username, broker_password, broker_host, broker_port, - broker_virtual_host, repository + non_interactive, profile: Profile, email, first_name, last_name, institution, db_engine, db_backend, db_host, + db_port, 
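# Quick check of the `_strip_private_keys` helper that `verdi profile show` now uses:
# keys starting with an underscore are dropped at every nesting level, so internal
# markers such as `_v6_backend` (added by the config migration below) stay hidden.
def _strip_private_keys(dct: dict):
    return {
        key: _strip_private_keys(value) if isinstance(value, dict) else value
        for key, value in dct.items()
        if not key.startswith('_')
    }


assert _strip_private_keys({'storage': {'backend': 'psql_dos', '_v6_backend': 'django'}, '_secret': 1}) == {
    'storage': {'backend': 'psql_dos'}
}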
db_name, db_username, db_password, broker_protocol, broker_username, broker_password, broker_host, + broker_port, broker_virtual_host, repository ): - """Setup a new profile.""" + """Set up a new profile. + + This method assumes that an empty PSQL database and the corresponding database user have already been created. + """ # pylint: disable=too-many-arguments,too-many-locals,unused-argument from aiida import orm from aiida.manage.configuration import get_config @@ -75,40 +77,24 @@ def setup( config = get_config() - # Creating the profile + # Create the profile, set it as the default and load it config.add_profile(profile) config.set_default_profile(profile.name) - - # Load the profile load_profile(profile.name) echo.echo_success(f'created new profile `{profile.name}`.') - # Migrate the database - echo.echo_report('migrating the database.') - manager = get_manager() - backend = manager._load_backend(schema_check=False, repository_check=False) # pylint: disable=protected-access + # Initialise the storage + echo.echo_report('initialising the profile storage.') + storage_cls = profile.storage_cls try: - backend.migrate() + storage_cls.migrate(profile) except Exception as exception: # pylint: disable=broad-except echo.echo_critical( - f'database migration failed, probably because connection details are incorrect:\n{exception}' + f'storage initialisation failed, probably because connection details are incorrect:\n{exception}' ) else: - echo.echo_success('database migration completed.') - - # Retrieve the repository UUID from the database. If set, this means this database is associated with the repository - # with that UUID and we have to make sure that the provided repository corresponds to it. - backend_manager = manager.get_backend_manager() - repository_uuid_database = backend_manager.get_repository_uuid() - repository_uuid_profile = backend.get_repository().uuid - - if repository_uuid_database != repository_uuid_profile: - echo.echo_critical( - f'incompatible database and repository configured:\n' - f'Database `{db_name}` is associated with the repository with UUID `{repository_uuid_database}`\n' - f'However, the configured repository has UUID `{repository_uuid_profile}`.'
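# Condensed sketch of the storage flow `verdi setup` follows after this change: the
# profile resolves its storage class and that class initialises (migrates) itself; the
# old backend-manager and repository-UUID cross-check disappear. Error handling is
# simplified here to a plain exception.
from aiida.manage.configuration import get_config, load_profile


def initialise_profile(profile):
    config = get_config()
    config.add_profile(profile)
    config.set_default_profile(profile.name)
    load_profile(profile.name)

    storage_cls = profile.storage_cls  # e.g. PsqlDosBackend for 'psql_dos'
    try:
        storage_cls.migrate(profile)  # creates or upgrades the schema in place
    except Exception as exception:  # pylint: disable=broad-except
        raise SystemExit(f'storage initialisation failed: {exception}') from exception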
- ) + echo.echo_success('storage initialisation completed.') # Optionally setting configuration default user settings config.set_option('autofill.user.email', email, override=False) @@ -124,6 +110,8 @@ def setup( user.store() profile.default_user_email = user.email config.update_profile(profile) + + # store the updated configuration config.store() diff --git a/aiida/cmdline/commands/cmd_status.py b/aiida/cmdline/commands/cmd_status.py index 4cece28f4a..c55640615d 100644 --- a/aiida/cmdline/commands/cmd_status.py +++ b/aiida/cmdline/commands/cmd_status.py @@ -16,7 +16,7 @@ from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params import options from aiida.cmdline.utils import echo -from aiida.common.exceptions import IncompatibleDatabaseSchema +from aiida.common.exceptions import CorruptStorage, IncompatibleStorageSchema, UnreachableStorage from aiida.common.log import override_log_level from ..utils.echo import ExitCode # pylint: disable=import-error,no-name-in-module @@ -59,9 +59,8 @@ def verdi_status(print_traceback, no_rmq): from aiida import __version__ from aiida.cmdline.utils.daemon import delete_stale_pid_file, get_daemon_status from aiida.common.utils import Capturing - from aiida.manage.configuration import get_rabbitmq_version, is_rabbitmq_version_supported from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER - from aiida.manage.manager import get_manager + from aiida.manage.manager import check_rabbitmq_version, get_manager exit_code = ExitCode.SUCCESS @@ -85,68 +84,50 @@ def verdi_status(print_traceback, no_rmq): print_status(ServiceStatus.ERROR, 'profile', message, exception=exc, print_traceback=print_traceback) sys.exit(ExitCode.CRITICAL) # stop here - without a profile we cannot access anything - # Getting the repository - try: - repository = manager.get_backend().get_repository() - except Exception as exc: - message = 'Error with repository folder' - print_status(ServiceStatus.ERROR, 'repository', message, exception=exc, print_traceback=print_traceback) - exit_code = ExitCode.CRITICAL - else: - repository_status = f'Connected to {repository}' - print_status(ServiceStatus.UP, 'repository', repository_status) - - # Getting the postgres status by trying to get a database cursor - backend_manager = manager.get_backend_manager() - dbgen = backend_manager.get_schema_generation_database() - dbver = backend_manager.get_schema_version_backend() - database_data = [ - profile.storage_config['database_name'], - dbgen, - dbver, - profile.storage_config['database_username'], - profile.storage_config['database_hostname'], - profile.storage_config['database_port'], - ] + # Check the backend storage + storage_head_version = None try: with override_log_level(): # temporarily suppress noisy logging - backend = manager.get_backend() - backend.cursor() - - except IncompatibleDatabaseSchema: - message = f'Database schema {dbgen} / {dbver} (generation/version) is incompatible with the code. ' - message += 'Run `verdi storage migrate` to solve this.' - print_status(ServiceStatus.DOWN, 'postgres', message) + storage_cls = profile.storage_cls + storage_head_version = storage_cls.version_head() + storage_backend = storage_cls(profile) + except UnreachableStorage as exc: + message = 'Unable to connect to profile\'s storage.' 
+ print_status(ServiceStatus.DOWN, 'storage', message, exception=exc, print_traceback=print_traceback) exit_code = ExitCode.CRITICAL - - except Exception as exc: - message = 'Unable to connect to database `{}` with schema {} / {} (generation/version) as {}@{}:{}'.format( - *database_data + except IncompatibleStorageSchema as exc: + message = ( + f'Storage schema version is incompatible with the code version {storage_head_version!r}. ' + 'Run `verdi storage migrate` to solve this.' ) - print_status(ServiceStatus.DOWN, 'postgres', message, exception=exc, print_traceback=print_traceback) + print_status(ServiceStatus.DOWN, 'storage', message) + exit_code = ExitCode.CRITICAL + except CorruptStorage as exc: + message = 'Storage is corrupted.' + print_status(ServiceStatus.DOWN, 'storage', message, exception=exc, print_traceback=print_traceback) + exit_code = ExitCode.CRITICAL + except Exception as exc: + message = 'Unable to instantiate profile\'s storage.' + print_status(ServiceStatus.ERROR, 'storage', message, exception=exc, print_traceback=print_traceback) exit_code = ExitCode.CRITICAL - else: - message = 'Connected to database `{}` with schema {} / {} (generation/version) as {}@{}:{}'.format( - *database_data - ) - print_status(ServiceStatus.UP, 'postgres', message) + message = str(storage_backend) + print_status(ServiceStatus.UP, 'storage', message) # Getting the rmq status if not no_rmq: try: with Capturing(capture_stderr=True): with override_log_level(): # temporarily suppress noisy logging - comm = manager.create_communicator(with_orm=False) - comm.close() + comm = manager.get_communicator() except Exception as exc: message = f'Unable to connect to rabbitmq with URL: {profile.get_rmq_url()}' print_status(ServiceStatus.ERROR, 'rabbitmq', message, exception=exc, print_traceback=print_traceback) exit_code = ExitCode.CRITICAL else: - version = get_rabbitmq_version() + version, supported = check_rabbitmq_version(comm) connection = f'Connected to RabbitMQ v{version} as {profile.get_rmq_url()}' - if is_rabbitmq_version_supported(): + if supported: print_status(ServiceStatus.UP, 'rabbitmq', connection) else: print_status(ServiceStatus.WARNING, 'rabbitmq', 'Incompatible RabbitMQ version detected!
' + connection) diff --git a/aiida/cmdline/commands/cmd_storage.py b/aiida/cmdline/commands/cmd_storage.py index a11c3a128b..a609a66525 100644 --- a/aiida/cmdline/commands/cmd_storage.py +++ b/aiida/cmdline/commands/cmd_storage.py @@ -10,6 +10,7 @@ """`verdi storage` commands.""" import click +from click_spinner import spinner from aiida.cmdline.commands.cmd_verdi import verdi from aiida.cmdline.params import options @@ -22,12 +23,23 @@ def verdi_storage(): """Inspect and manage stored data for a profile.""" +@verdi_storage.command('version') +def storage_version(): + """Print the current version of the storage schema.""" + from aiida import get_profile + profile = get_profile() + head_version = profile.storage_cls.version_head() + profile_version = profile.storage_cls.version_profile(profile) + echo.echo(f'Latest storage schema version: {head_version!r}') + echo.echo(f'Storage schema version of {profile.name!r}: {profile_version!r}') + + @verdi_storage.command('migrate') @options.FORCE() def storage_migrate(force): """Migrate the storage to the latest schema version.""" from aiida.engine.daemon.client import get_daemon_client - from aiida.manage.manager import get_manager + from aiida.manage import get_manager client = get_daemon_client() if client.is_daemon_running: @@ -35,44 +47,40 @@ def storage_migrate(force): manager = get_manager() profile = manager.get_profile() - backend = manager._load_backend(schema_check=False) # pylint: disable=protected-access + storage_cls = profile.storage_cls + + if not force: + + echo.echo_warning('Migrating your storage might take a while and is not reversible.') + echo.echo_warning('Before continuing, make sure you have completed the following steps:') + echo.echo_warning('') + echo.echo_warning(' 1. Make sure you have no active calculations and workflows.') + echo.echo_warning(' 2. If you do, revert the code to the previous version and finish running them first.') + echo.echo_warning(' 3. Stop the daemon using `verdi daemon stop`') + echo.echo_warning(' 4. Make a backup of your database and repository') + echo.echo_warning('') + echo.echo_warning('', nl=False) + + expected_answer = 'MIGRATE NOW' + confirm_message = 'If you have completed the steps above and want to migrate profile "{}", type {}'.format( + profile.name, expected_answer + ) - if force: try: - backend.migrate() - except (exceptions.ConfigurationError, exceptions.DatabaseMigrationError) as exception: - echo.echo_critical(str(exception)) - return - - echo.echo_warning('Migrating your storage might take a while and is not reversible.') - echo.echo_warning('Before continuing, make sure you have completed the following steps:') - echo.echo_warning('') - echo.echo_warning(' 1. Make sure you have no active calculations and workflows.') - echo.echo_warning(' 2. If you do, revert the code to the previous version and finish running them first.') - echo.echo_warning(' 3. Stop the daemon using `verdi daemon stop`') - echo.echo_warning(' 4. 
Make a backup of your database and repository') - echo.echo_warning('') - echo.echo_warning('', nl=False) - - expected_answer = 'MIGRATE NOW' - confirm_message = 'If you have completed the steps above and want to migrate profile "{}", type {}'.format( - profile.name, expected_answer - ) + response = click.prompt(confirm_message) + while response != expected_answer: + response = click.prompt(confirm_message) + except click.Abort: + echo.echo('\n') + echo.echo_critical('Migration aborted, the data has not been affected.') + return try: - response = click.prompt(confirm_message) - while response != expected_answer: - response = click.prompt(confirm_message) - except click.Abort: - echo.echo('\n') - echo.echo_critical('Migration aborted, the data has not been affected.') + storage_cls.migrate(profile) + except (exceptions.ConfigurationError, exceptions.StorageMigrationError) as exception: + echo.echo_critical(str(exception)) else: - try: - backend.migrate() - except (exceptions.ConfigurationError, exceptions.DatabaseMigrationError) as exception: - echo.echo_critical(str(exception)) - else: - echo.echo_success('migration completed') + echo.echo_success('migration completed') @verdi_storage.group('integrity') @@ -88,10 +96,11 @@ def storage_info(statistics): from aiida.cmdline.utils.common import get_database_summary from aiida.orm import QueryBuilder - data = { - 'database': get_database_summary(QueryBuilder, statistics), - 'repository': get_repository_info(statistics=statistics), - } + with spinner(): + data = { + 'database': get_database_summary(QueryBuilder, statistics), + 'repository': get_repository_info(statistics=statistics), + } echo.echo_dictionary(data, sort_keys=False, fmt='yaml') diff --git a/aiida/cmdline/params/options/commands/setup.py b/aiida/cmdline/params/options/commands/setup.py index 853d335751..14881ecf09 100644 --- a/aiida/cmdline/params/options/commands/setup.py +++ b/aiida/cmdline/params/options/commands/setup.py @@ -14,7 +14,6 @@ import click -from aiida.backends import BACKEND_DJANGO from aiida.cmdline.params import options, types from aiida.manage.configuration import Profile, get_config, get_config_option from aiida.manage.external.postgres import DEFAULT_DBINFO @@ -260,7 +259,7 @@ def get_quicksetup_password(ctx, param, value): # pylint: disable=unused-argume SETUP_DATABASE_BACKEND = QUICKSETUP_DATABASE_BACKEND.clone( prompt='Database backend', - contextual_default=functools.partial(get_profile_attribute_default, ('storage_backend', BACKEND_DJANGO)), + contextual_default=functools.partial(get_profile_attribute_default, ('storage_backend', 'psql_dos')), cls=options.interactive.InteractiveOption ) diff --git a/aiida/cmdline/params/options/main.py b/aiida/cmdline/params/options/main.py index 9985bac391..1125b66ec1 100644 --- a/aiida/cmdline/params/options/main.py +++ b/aiida/cmdline/params/options/main.py @@ -11,7 +11,6 @@ import click from pgsu import DEFAULT_DSN as DEFAULT_DBINFO # pylint: disable=no-name-in-module -from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA from aiida.common.log import LOG_LEVELS, configure_logging from aiida.manage.external.rmq import BROKER_DEFAULTS @@ -283,10 +282,7 @@ def set_log_level(_ctx, _param, value): ) DB_BACKEND = OverridableOption( - '--db-backend', - type=click.Choice([BACKEND_DJANGO, BACKEND_SQLA]), - default=BACKEND_DJANGO, - help='Database backend to use.' + '--db-backend', type=click.Choice(['psql_dos']), default='psql_dos', help='Database backend to use.' 
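# The re-prompt idiom `verdi storage migrate` uses when `--force` is not given: keep
# asking until the user types the exact phrase, and treat Ctrl-C (click.Abort) as a
# clean abort. A standalone rendition of the loop shown above.
import click


def confirm_migration(profile_name: str, expected_answer: str = 'MIGRATE NOW') -> None:
    confirm_message = 'If you have completed the steps above and want to migrate profile "{}", type {}'.format(
        profile_name, expected_answer
    )
    try:
        response = click.prompt(confirm_message)
        while response != expected_answer:
            response = click.prompt(confirm_message)
    except click.Abort:
        raise SystemExit('Migration aborted, the data has not been affected.')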
) DB_HOST = OverridableOption( diff --git a/aiida/cmdline/params/types/plugin.py b/aiida/cmdline/params/types/plugin.py index 005e9d2f30..c86b36ad7f 100644 --- a/aiida/cmdline/params/types/plugin.py +++ b/aiida/cmdline/params/types/plugin.py @@ -196,12 +196,14 @@ def get_entry_point_from_string(self, entry_point_string): elif entry_point_format == EntryPointFormat.MINIMAL: name = entry_point_string - matching_groups = [group for group, entry_point in self._entry_points if entry_point.name == name] + matching_groups = {group for group, entry_point in self._entry_points if entry_point.name == name} if len(matching_groups) > 1: raise ValueError( "entry point '{}' matches more than one valid entry point group [{}], " - 'please specify an explicit group prefix'.format(name, ' '.join(matching_groups)) + 'please specify an explicit group prefix: {}'.format( + name, ' '.join(matching_groups), self._entry_points + ) ) elif not matching_groups: raise ValueError( @@ -209,7 +211,7 @@ def get_entry_point_from_string(self, entry_point_string): 'entry point groups: {}'.format(name, ' '.join(self.groups)) ) - group = matching_groups[0] + group = matching_groups.pop() else: ValueError(f'invalid entry point string format: {entry_point_string}') diff --git a/aiida/cmdline/utils/common.py b/aiida/cmdline/utils/common.py index cb869b78b0..4bce9beb56 100644 --- a/aiida/cmdline/utils/common.py +++ b/aiida/cmdline/utils/common.py @@ -474,7 +474,7 @@ def get_num_workers(): Get the number of active daemon workers from the circus client """ from aiida.common.exceptions import CircusCallError - from aiida.manage.manager import get_manager + from aiida.manage import get_manager manager = get_manager() client = manager.get_daemon_client() @@ -506,12 +506,11 @@ def check_worker_load(active_slots): :param active_slots: the number of currently active worker slots """ from aiida.common.exceptions import CircusCallError - from aiida.manage.configuration import get_config + from aiida.manage import get_config_option warning_threshold = 0.9 # 90% - config = get_config() - slots_per_worker = config.get_option('daemon.worker_process_slots', config.current_profile.name) + slots_per_worker = get_config_option('daemon.worker_process_slots') try: active_workers = get_num_workers() diff --git a/aiida/cmdline/utils/decorators.py b/aiida/cmdline/utils/decorators.py index 9859f5351a..e6abd980ab 100644 --- a/aiida/cmdline/utils/decorators.py +++ b/aiida/cmdline/utils/decorators.py @@ -37,15 +37,14 @@ def load_backend_if_not_loaded(): If no profile has been loaded yet, the default profile will be loaded first. A spinner will be shown during both actions to indicate that the function is working and has not crashed, since loading can take a second. 
""" - from aiida.manage.configuration import get_profile, load_profile - from aiida.manage.manager import get_manager + from aiida.manage import get_manager manager = get_manager() - if get_profile() is None or not manager.backend_loaded: + if manager.get_profile() is None or not manager.profile_storage_loaded: with spinner(): - load_profile() # This will load the default profile if no profile has already been loaded - manager.get_backend() # This will load the backend of the loaded profile, if not already loaded + manager.load_profile() # This will load the default profile if no profile has already been loaded + manager.get_profile_storage() # This will load the backend of the loaded profile, if not already loaded def with_dbenv(): diff --git a/aiida/cmdline/utils/shell.py b/aiida/cmdline/utils/shell.py index afe85feb2f..2101583aec 100644 --- a/aiida/cmdline/utils/shell.py +++ b/aiida/cmdline/utils/shell.py @@ -89,17 +89,15 @@ def run_shell(interface=None): def get_start_namespace(): """Load all default and custom modules""" - from aiida.manage.configuration import get_config + from aiida.manage import get_config_option user_ns = {} - config = get_config() - # Load default modules for app_mod, model_name, alias in DEFAULT_MODULES_LIST: user_ns[alias] = getattr(__import__(app_mod, {}, {}, model_name), model_name) - verdi_shell_auto_import = config.get_option('verdi.shell.auto_import', config.current_profile.name).split(':') + verdi_shell_auto_import = get_config_option('verdi.shell.auto_import').split(':') # Load custom modules modules_list = [(str(e[0]), str(e[2])) for e in [p.rpartition('.') for p in verdi_shell_auto_import] if e[1] == '.'] diff --git a/aiida/common/__init__.py b/aiida/common/__init__.py index fcb30cb980..3c68731ff0 100644 --- a/aiida/common/__init__.py +++ b/aiida/common/__init__.py @@ -32,12 +32,13 @@ 'AttributeDict', 'CalcInfo', 'CalcJobState', + 'ClosedStorage', 'CodeInfo', 'CodeRunMode', 'ConfigurationError', 'ConfigurationVersionError', 'ContentNotExistent', - 'DatabaseMigrationError', + 'CorruptStorage', 'DbContentError', 'DefaultFieldsAttributeDict', 'EntryPointError', @@ -48,7 +49,7 @@ 'GraphTraversalRule', 'GraphTraversalRules', 'HashingError', - 'IncompatibleDatabaseSchema', + 'IncompatibleStorageSchema', 'InputValidationError', 'IntegrityError', 'InternalError', @@ -74,6 +75,7 @@ 'ProgressReporterAbstract', 'RemoteOperationError', 'StashMode', + 'StorageMigrationError', 'StoringNotAllowed', 'TQDM_BAR_FORMAT', 'TestsNotAllowedError', diff --git a/aiida/common/exceptions.py b/aiida/common/exceptions.py index 902feed9f2..eec8b94446 100644 --- a/aiida/common/exceptions.py +++ b/aiida/common/exceptions.py @@ -15,10 +15,10 @@ 'IntegrityError', 'UniquenessError', 'EntryPointError', 'MissingEntryPointError', 'MultipleEntryPointError', 'LoadingEntryPointError', 'InvalidEntryPointTypeError', 'InvalidOperation', 'ParsingError', 'InternalError', 'PluginInternalError', 'ValidationError', 'ConfigurationError', 'ProfileConfigurationError', - 'MissingConfigurationError', 'ConfigurationVersionError', 'IncompatibleDatabaseSchema', 'DbContentError', - 'InputValidationError', 'FeatureNotAvailable', 'FeatureDisabled', 'LicensingException', 'TestsNotAllowedError', - 'UnsupportedSpeciesError', 'TransportTaskException', 'OutputParsingError', 'HashingError', 'DatabaseMigrationError', - 'LockedProfileError', 'LockingProfileError' + 'MissingConfigurationError', 'ConfigurationVersionError', 'IncompatibleStorageSchema', 'CorruptStorage', + 'DbContentError', 'InputValidationError', 
'FeatureNotAvailable', 'FeatureDisabled', 'LicensingException', + 'TestsNotAllowedError', 'UnsupportedSpeciesError', 'TransportTaskException', 'OutputParsingError', 'HashingError', + 'StorageMigrationError', 'LockedProfileError', 'LockingProfileError', 'ClosedStorage' ) @@ -183,12 +183,38 @@ class ConfigurationVersionError(ConfigurationError): """ +class ClosedStorage(AiidaException): + """Raised when trying to access data from a closed storage backend.""" + + +class UnreachableStorage(ConfigurationError): + """Raised when a connection to the storage backend fails.""" + + class IncompatibleDatabaseSchema(ConfigurationError): - """Raised when the database schema is incompatible with that of the code.""" + """Raised when the storage schema is incompatible with that of the code. + + Deprecated for ``IncompatibleStorageSchema`` + """ + + +class IncompatibleStorageSchema(IncompatibleDatabaseSchema): + """Raised when the storage schema is incompatible with that of the code.""" + + +class CorruptStorage(ConfigurationError): + """Raised when the storage is not found to be internally consistent on validation.""" class DatabaseMigrationError(AiidaException): - """Raised if a critical error is encountered during a database migration.""" + """Raised if a critical error is encountered during a storage migration. + + Deprecated for ``StorageMigrationError`` + """ + + +class StorageMigrationError(DatabaseMigrationError): + """Raised if a critical error is encountered during a storage migration.""" class DbContentError(AiidaException): diff --git a/aiida/common/log.py b/aiida/common/log.py index a0d1b10ffe..0324d6d0d2 100644 --- a/aiida/common/log.py +++ b/aiida/common/log.py @@ -154,6 +154,8 @@ def configure_logging(with_orm=False, daemon=False, daemon_log_file=None): will cause a 'daemon_handler' to be added to all the configured loggers, that is a RotatingFileHandler that writes to the log file. + :param with_orm: configure logging to the backend storage. 
+ We don't configure this by default, since it would load the modules that slow the CLI :param daemon: configure the logging for a daemon task by adding a file handler instead of the default 'console' StreamHandler :param daemon_log_file: absolute filepath of the log file for the RotatingFileHandler diff --git a/aiida/engine/daemon/client.py b/aiida/engine/daemon/client.py index 157588597b..86a0dc7617 100644 --- a/aiida/engine/daemon/client.py +++ b/aiida/engine/daemon/client.py @@ -15,6 +15,7 @@ import tempfile from typing import TYPE_CHECKING, Any, Dict, Optional +from aiida import get_profile from aiida.manage.configuration import get_config, get_config_option from aiida.manage.configuration.profile import Profile @@ -55,7 +56,7 @@ def get_daemon_client(profile_name: Optional[str] = None) -> 'DaemonClient': if profile_name: profile = config.get_profile(profile_name) else: - profile = config.current_profile + profile = get_profile() return DaemonClient(profile) diff --git a/aiida/engine/daemon/runner.py b/aiida/engine/daemon/runner.py index 191bc7beaa..137fe36d8b 100644 --- a/aiida/engine/daemon/runner.py +++ b/aiida/engine/daemon/runner.py @@ -15,7 +15,7 @@ from aiida.common.log import configure_logging from aiida.engine.daemon.client import get_daemon_client from aiida.engine.runners import Runner -from aiida.manage.manager import get_manager +from aiida.manage import get_manager LOGGER = logging.getLogger(__name__) diff --git a/aiida/engine/processes/functions.py b/aiida/engine/processes/functions.py index aee84f7d86..b3bab6a6bf 100644 --- a/aiida/engine/processes/functions.py +++ b/aiida/engine/processes/functions.py @@ -16,7 +16,7 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Sequence, Tuple, Type from aiida.common.lang import override -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import CalcFunctionNode, Data, ProcessNode, WorkFunctionNode from aiida.orm.utils.mixins import FunctionCalculationMixin diff --git a/aiida/engine/utils.py b/aiida/engine/utils.py index f57f7500eb..b0d9066c67 100644 --- a/aiida/engine/utils.py +++ b/aiida/engine/utils.py @@ -255,8 +255,7 @@ def set_process_state_change_timestamp(process: 'Process') -> None: :param process: the Process instance that changed its state """ from aiida.common import timezone - from aiida.common.exceptions import UniquenessError - from aiida.manage.manager import get_manager # pylint: disable=cyclic-import + from aiida.manage import get_manager # pylint: disable=cyclic-import from aiida.orm import CalculationNode, ProcessNode, WorkflowNode if isinstance(process.node, CalculationNode): @@ -273,11 +272,8 @@ def set_process_state_change_timestamp(process: 'Process') -> None: description = PROCESS_STATE_CHANGE_DESCRIPTION.format(process_type) value = timezone.datetime_to_isoformat(timezone.now()) - try: - manager = get_manager() - manager.get_backend_manager().get_settings_manager().set(key, value, description) - except UniquenessError as exception: - process.logger.debug(f'could not update the {key} setting because of a UniquenessError: {exception}') + backend = get_manager().get_profile_storage() + backend.set_global_variable(key, value, description) def get_process_state_change_timestamp(process_type: Optional[str] = None) -> Optional[datetime]: @@ -291,10 +287,8 @@ def get_process_state_change_timestamp(process_type: Optional[str] = None) -> Op :return: a timestamp or None """ from aiida.common import timezone - from aiida.common.exceptions import 
NotExistent - from aiida.manage.manager import get_manager # pylint: disable=cyclic-import + from aiida.manage import get_manager # pylint: disable=cyclic-import - manager = get_manager().get_backend_manager().get_settings_manager() valid_process_types = ['calculation', 'work'] if process_type is not None and process_type not in valid_process_types: @@ -307,13 +301,15 @@ def get_process_state_change_timestamp(process_type: Optional[str] = None) -> Op timestamps: List[datetime] = [] + backend = get_manager().get_profile_storage() + for process_type_key in process_types: key = PROCESS_STATE_CHANGE_KEY.format(process_type_key) try: - time_stamp = timezone.isoformat_to_datetime(manager.get(key).value) + time_stamp = timezone.isoformat_to_datetime(backend.get_global_variable(key)) if time_stamp is not None: timestamps.append(time_stamp) - except NotExistent: + except KeyError: continue if not timestamps: diff --git a/aiida/manage/__init__.py b/aiida/manage/__init__.py index 40eab1d8d4..b33daffb28 100644 --- a/aiida/manage/__init__.py +++ b/aiida/manage/__init__.py @@ -27,7 +27,6 @@ from .caching import * from .configuration import * -from .database import * from .external import * from .manager import * @@ -59,9 +58,7 @@ 'get_option_names', 'get_use_cache', 'parse_option', - 'reset_manager', 'upgrade_config', - 'write_database_integrity_violation', ) # yapf: enable diff --git a/aiida/manage/configuration/__init__.py b/aiida/manage/configuration/__init__.py index 6f9bcb810b..7309d4695b 100644 --- a/aiida/manage/configuration/__init__.py +++ b/aiida/manage/configuration/__init__.py @@ -45,120 +45,22 @@ # pylint: disable=global-statement,redefined-outer-name,wrong-import-order __all__ += ( - 'get_config', 'get_config_option', 'get_config_path', 'get_profile', 'load_documentation_profile', 'load_profile', - 'reset_config', 'reset_profile', 'CONFIG', 'PROFILE', 'BACKEND_UUID' + 'get_config', 'get_config_option', 'get_config_path', 'get_profile', 'load_profile', 'reset_config', 'CONFIG' ) +from contextlib import contextmanager import os import shutil -from typing import Optional +from typing import TYPE_CHECKING, Any, Optional import warnings from aiida.common.warnings import AiidaDeprecationWarning -from . import options +if TYPE_CHECKING: + from aiida.manage.configuration import Config, Profile # pylint: disable=import-self -CONFIG = None -PROFILE = None -BACKEND_UUID = None # This will be set to the UUID of the profile as soon as its corresponding backend is loaded - - -def is_rabbitmq_version_supported(): - """Return whether the version of RabbitMQ configured for the current profile is supported. - - Versions 3.8 and above are not compatible with AiiDA with default configuration. - - :return: boolean whether the current RabbitMQ version is supported. - """ - from packaging.version import parse - return get_rabbitmq_version() < parse('3.8') - - -def get_rabbitmq_version(): - """Return the version of the RabbitMQ server that the current profile connects to. 
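# The storage "global variables" API that replaces the old settings manager for the
# process state-change timestamps above: `set_global_variable` upserts a key, and a
# missing key now surfaces as a plain KeyError instead of NotExistent.
from aiida.common import timezone
from aiida.manage import get_manager


def write_timestamp(key: str, description: str) -> None:
    backend = get_manager().get_profile_storage()
    backend.set_global_variable(key, timezone.datetime_to_isoformat(timezone.now()), description)


def read_timestamp(key: str):
    backend = get_manager().get_profile_storage()
    try:
        return timezone.isoformat_to_datetime(backend.get_global_variable(key))
    except KeyError:
        return None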
- - :return: :class:`packaging.version.Version` - """ - from packaging.version import parse - - from aiida.manage.manager import get_manager - communicator = get_manager().get_communicator() - return parse(communicator.server_properties['version'].decode('utf-8')) - - -def check_rabbitmq_version(): - """Check the version of RabbitMQ that is being connected to and emit warning if the version is not compatible.""" - from aiida.cmdline.utils import echo - if not is_rabbitmq_version_supported(): - echo.echo_warning(f'RabbitMQ v{get_rabbitmq_version()} is not supported and will cause unexpected problems!') - echo.echo_warning('It can cause long-running workflows to crash and jobs to be submitted multiple times.') - echo.echo_warning('See https://github.com/aiidateam/aiida-core/wiki/RabbitMQ-version-to-use for details.') - - -def check_version(): - """Check the currently installed version of ``aiida-core`` and warn if it is a post release development version. - - The ``aiida-core`` package maintains the protocol that the ``develop`` branch will use a post release version - number. This means it will always append `.post0` to the version of the latest release. This should mean that if - this protocol is maintained properly, this method will print a warning if the currently installed version is a - post release development branch and not an actual release. - """ - from packaging.version import parse - - from aiida import __version__ - from aiida.cmdline.utils import echo - - version = parse(__version__) - - # Showing of the warning can be turned off by setting the following option to false. - show_warning = get_config_option('warnings.development_version') - - if version.is_postrelease and show_warning: - echo.echo_warning(f'You are currently using a post release development version of AiiDA: {version}') - echo.echo_warning('Be aware that this is not recommended for production and is not officially supported.') - echo.echo_warning('Databases used with this version may not be compatible with future releases of AiiDA') - echo.echo_warning('as you might not be able to automatically migrate your data.\n') - - -def load_profile(profile: Optional[str] = None) -> Profile: - """Load a profile. - - .. note:: if a profile is already loaded and no explicit profile is specified, nothing will be done - - :param profile: the name of the profile to load, by default will use the one marked as default in the config - - :return: the loaded `Profile` instance - :raises `aiida.common.exceptions.InvalidOperation`: if the backend of another profile has already been loaded - """ - from aiida.common import InvalidOperation - from aiida.common.log import configure_logging - - global PROFILE # pylint: disable=global-variable-not-assigned - global BACKEND_UUID # pylint: disable=global-variable-not-assigned - - # If a profile is loaded and the specified profile name is None or that of the currently loaded, do nothing - if PROFILE and (profile is None or PROFILE.name is profile): - return PROFILE - - PROFILE = get_config().get_profile(profile) - - if BACKEND_UUID is not None and BACKEND_UUID != PROFILE.uuid: - # Once the switching of profiles with different backends becomes possible, the backend has to be reset properly - raise InvalidOperation('cannot switch profile because backend of another profile is already loaded') - - # Reconfigure the logging to make sure that profile specific logging configuration options are taken into account. 
- # Note that we do not configure with `with_orm=True` because that will force the backend to be loaded. This should - # instead be done lazily in `Manager._load_backend`. - configure_logging() - - # Check whether a development version is being run. Note that needs to be called after ``configure_logging`` because - # this function relies on the logging being properly configured for the warning to show. - check_version() - - # Check whether a compatible version of RabbitMQ is being used. - check_rabbitmq_version() - - return PROFILE +# global variables for aiida +CONFIG: Optional['Config'] = None def get_config_path(): @@ -168,7 +70,7 @@ def get_config_path(): return os.path.join(AIIDA_CONFIG_FOLDER, DEFAULT_CONFIG_FILE_NAME) -def load_config(create=False): +def load_config(create=False) -> 'Config': """Instantiate Config object representing an AiiDA configuration file. Warning: Contrary to :func:`~aiida.manage.configuration.get_config`, this function is uncached and will always @@ -234,25 +136,44 @@ def _merge_deprecated_cache_yaml(config, filepath): shutil.move(cache_path, cache_path_backup) -def get_profile() -> Profile: +def load_profile(profile: Optional[str] = None, allow_switch=False) -> 'Profile': + """Load a global profile, unloading any previously loaded profile. + + .. note:: if a profile is already loaded and no explicit profile is specified, nothing will be done + + :param profile: the name of the profile to load, by default will use the one marked as default in the config + :param allow_switch: if True, will allow switching to a different profile when storage is already loaded + + :return: the loaded `Profile` instance + :raises `aiida.common.exceptions.InvalidOperation`: + if another profile has already been loaded and allow_switch is False + """ + from aiida.manage import get_manager + return get_manager().load_profile(profile, allow_switch) + + +def get_profile() -> Optional['Profile']: """Return the currently loaded profile. :return: the globally loaded `Profile` instance or `None` """ - global PROFILE # pylint: disable=global-variable-not-assigned - return PROFILE + from aiida.manage import get_manager + return get_manager().get_profile() -def reset_profile(): - """Reset the globally loaded profile. +@contextmanager +def profile_context(profile: Optional[str] = None, allow_switch=False) -> 'Profile': + """Return a context manager for temporarily loading a profile, and unloading on exit. - .. warning:: This is experimental functionality and should for now be used only internally. If the reset is unclean - weird unknown side-effects may occur that end up corrupting or destroying data. + :param profile: the name of the profile to load, by default will use the one marked as default in the config + :param allow_switch: if True, will allow switching to a different profile + + :return: a context manager for temporarily loading a profile """ - global PROFILE - global BACKEND_UUID - PROFILE = None - BACKEND_UUID = None + from aiida.manage import get_manager + get_manager().load_profile(profile, allow_switch) + yield + get_manager().unload_profile() def reset_config(): @@ -300,65 +221,44 @@ def get_config(create=False): return CONFIG -def get_config_option(option_name): - """Return the value for the given configuration option. +def get_config_option(option_name: str) -> Any: + """Return the value of a configuration option. - This function will attempt to load the value of the option as defined for the current profile or otherwise as - defined configuration wide. 
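# Usage sketch for the new `profile_context` manager defined above: temporarily load a
# profile, run some work, and unload it again on exit. The profile name matches the test
# profile configured in this changeset; the query is purely illustrative.
from aiida.manage.configuration import profile_context


def count_nodes(profile_name: str = 'test_aiida') -> int:
    from aiida.orm import Node, QueryBuilder
    with profile_context(profile_name):
        return QueryBuilder().append(Node).count()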
If no configuration is yet loaded, this function will fall back on the default that may - be defined for the option itself. This is useful for options that need to be defined at loading time of AiiDA when - no configuration is yet loaded or may not even yet exist. In cases where one expects a profile to be loaded, - preference should be given to retrieving the option through the Config instance and its `get_option` method. + In order of priority, the option is returned from: - :param option_name: the name of the configuration option - :type option_name: str + 1. The current profile, if loaded and the option specified + 2. The current configuration, if loaded and the option specified + 3. The default value for the option - :return: option value as specified for the profile/configuration if loaded, otherwise option default + :param option_name: the name of the option to return + :return: the value of the option + :raises `aiida.common.exceptions.ConfigurationError`: if the option is not found """ - from aiida.common import exceptions - - option = options.get_option(option_name) - - try: - config = get_config(create=True) - except exceptions.ConfigurationError: - value = option.default if option.default is not options.NO_DEFAULT else None - else: - if config.current_profile: - # Try to get the option for the profile, but do not return the option default - value_profile = config.get_option(option_name, scope=config.current_profile.name, default=False) - else: - value_profile = None - - # Value is the profile value if defined or otherwise the global value, which will be None if not set - value = value_profile if value_profile else config.get_option(option_name) - - return value + from aiida.manage import get_manager + return get_manager().get_option(option_name) def load_documentation_profile(): """Load a dummy profile just for the purposes of being able to build the documentation. The building of the documentation will require importing the `aiida` package and some code will try to access the - loaded configuration and profile, which if not done will except. On top of that, Django will raise an exception if - the database models are loaded before its settings are loaded. This also is taken care of by loading a Django - profile and loading the corresponding backend. Calling this function will perform all these requirements allowing - the documentation to be built without having to install and configure AiiDA nor having an actual database present. + loaded configuration and profile, which if not done will except. + Calling this function allows the documentation to be built without having to install and configure AiiDA, + nor having an actual database present. 
""" import tempfile - from aiida.manage.manager import get_manager + from aiida.backends.sqlalchemy.models.base import get_orm_metadata from .config import Config - from .profile import Profile - global PROFILE global CONFIG with tempfile.NamedTemporaryFile() as handle: profile_name = 'readthedocs' profile_config = { 'storage': { - 'backend': 'django', + 'backend': 'psql_dos', 'config': { 'database_engine': 'postgresql_psycopg2', 'database_port': 5432, @@ -382,6 +282,9 @@ def load_documentation_profile(): }, } config = {'default_profile': profile_name, 'profiles': {profile_name: profile_config}} - PROFILE = Profile(profile_name, profile_config) CONFIG = Config(handle.name, config) - get_manager()._load_backend(schema_check=False, repository_check=False) # pylint: disable=protected-access + load_profile(profile_name) + + # we call this to make sure the ORM metadata is fully populated, + # so that ORM models can be properly documented + get_orm_metadata() diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py index e62b9f3828..9f69c98d39 100644 --- a/aiida/manage/configuration/config.py +++ b/aiida/manage/configuration/config.py @@ -26,7 +26,7 @@ __all__ = ('Config', 'config_schema', 'ConfigValidationError') -SCHEMA_FILE = 'config-v6.schema.json' +SCHEMA_FILE = 'config-v7.schema.json' @lru_cache(1) @@ -255,15 +255,6 @@ def default_profile_name(self): """ return self._default_profile - @property - def current_profile(self): - """Return the currently loaded profile. - - :return: the current profile or None if not defined - """ - from . import get_profile - return get_profile() - @property def profile_names(self): """Return the list of profile names. @@ -354,7 +345,6 @@ def delete_profile( :param include_database_user: also delete the database user configured for the profile. :param include_repository: also delete the repository configured for the profile. """ - # to-do storage backend specific stuff should be handled by the backend itself from aiida.manage.external.postgres import Postgres profile = self.get_profile(name) diff --git a/aiida/manage/configuration/migrations/migrations.py b/aiida/manage/configuration/migrations/migrations.py index 8ecb00d7d5..6d4d3baedb 100644 --- a/aiida/manage/configuration/migrations/migrations.py +++ b/aiida/manage/configuration/migrations/migrations.py @@ -25,8 +25,8 @@ # When the configuration file format is changed in a backwards-incompatible way, the oldest compatible version should # be set to the new current version. 
-CURRENT_CONFIG_VERSION = 6 -OLDEST_COMPATIBLE_CONFIG_VERSION = 6 +CURRENT_CONFIG_VERSION = 7 +OLDEST_COMPATIBLE_CONFIG_VERSION = 7 CONFIG_LOGGER = AIIDA_LOGGER.getChild('config') @@ -103,16 +103,17 @@ class SimplifyDefaultProfiles(SingleMigration): up_compatible = 3 def upgrade(self, config: ConfigType) -> None: - from aiida.manage.configuration import PROFILE + from aiida.manage.configuration import get_profile + global_profile = get_profile() default_profiles = config.pop('default_profiles', None) if default_profiles and 'daemon' in default_profiles: config['default_profile'] = default_profiles['daemon'] elif default_profiles and 'verdi' in default_profiles: config['default_profile'] = default_profiles['verdi'] - elif PROFILE is not None: - config['default_profile'] = PROFILE.name + elif global_profile is not None: + config['default_profile'] = global_profile.name def downgrade(self, config: ConfigType) -> None: if 'default_profile' in config: @@ -231,11 +232,9 @@ class AbstractStorageAndProcess(SingleMigration): def upgrade(self, config: ConfigType) -> None: for profile_name, profile in config.get('profiles', {}).items(): profile.setdefault('storage', {}) - if 'AIIDADB_BACKEND' in profile: - profile['storage']['backend'] = profile.pop('AIIDADB_BACKEND') - else: + if 'AIIDADB_BACKEND' not in profile: CONFIG_LOGGER.warning(f'profile {profile_name!r} had no expected "AIIDADB_BACKEND" key') - profile['storage']['backend'] = 'sqlalchemy' + profile['storage']['backend'] = profile.pop('AIIDADB_BACKEND', None) profile['storage'].setdefault('config', {}) for old, new in self.storage_conversions: if old in profile: @@ -252,9 +251,10 @@ def upgrade(self, config: ConfigType) -> None: CONFIG_LOGGER.warning(f'profile {profile_name!r} had no expected {old!r} key') def downgrade(self, config: ConfigType) -> None: - for profile in config.get('profiles', {}).values(): - if 'backend' in profile.get('storage', {}): - profile['AIIDADB_BACKEND'] = profile['storage']['backend'] + for profile_name, profile in config.get('profiles', {}).items(): + profile['AIIDADB_BACKEND'] = profile.get('storage', {}).get('backend', None) + if profile['AIIDADB_BACKEND'] is None: + CONFIG_LOGGER.warning(f'profile {profile_name!r} had no expected "storage.backend" key') for old, new in self.storage_conversions: if new in profile.get('storage', {}).get('config', {}): profile[old] = profile['storage']['config'].pop(new) @@ -265,8 +265,45 @@ def downgrade(self, config: ConfigType) -> None: profile.pop('process_control', None) +class MergeStorageBackendTypes(SingleMigration): + """`django` and `sqlalchemy` are now merged into `psql_dos`. + + The legacy name is stored under the `_v6_backend` key, to allow for downgrades. 
+ """ + down_revision = 6 + down_compatible = 6 + up_revision = 7 + up_compatible = 7 + + def upgrade(self, config: ConfigType) -> None: + for profile_name, profile in config.get('profiles', {}).items(): + if 'storage' in profile: + storage = profile['storage'] + if 'backend' in storage: + if storage['backend'] in ('django', 'sqlalchemy'): + profile['storage']['_v6_backend'] = storage['backend'] + storage['backend'] = 'psql_dos' + else: + CONFIG_LOGGER.warning( + f'profile {profile_name!r} had unknown storage backend {storage["backend"]!r}' + ) + + def downgrade(self, config: ConfigType) -> None: + for profile_name, profile in config.get('profiles', {}).items(): + if '_v6_backend' in profile.get('storage', {}): + profile.setdefault('storage', {})['backend'] = profile.pop('_v6_backend') + else: + CONFIG_LOGGER.warning(f'profile {profile_name!r} had no expected "storage._v6_backend" key') + + MIGRATIONS = ( - Initial, AddProfileUuid, SimplifyDefaultProfiles, AddMessageBroker, SimplifyOptions, AbstractStorageAndProcess + Initial, + AddProfileUuid, + SimplifyDefaultProfiles, + AddMessageBroker, + SimplifyOptions, + AbstractStorageAndProcess, + MergeStorageBackendTypes, ) diff --git a/aiida/manage/configuration/profile.py b/aiida/manage/configuration/profile.py index d6fdd0cd50..75923aa476 100644 --- a/aiida/manage/configuration/profile.py +++ b/aiida/manage/configuration/profile.py @@ -12,16 +12,15 @@ from copy import deepcopy import os import pathlib -from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional +from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Type from aiida.common import exceptions -from aiida.common.lang import classproperty from .options import parse_option from .settings import DAEMON_DIR, DAEMON_LOG_DIR if TYPE_CHECKING: - from aiida.repository import Repository # pylint: disable=ungrouped-imports + from aiida.orm.implementation.backends import Backend __all__ = ('Profile',) @@ -55,18 +54,6 @@ class Profile: # pylint: disable=too-many-public-methods KEY_PROCESS, ) - @classproperty - def defaults(cls): # pylint: disable=no-self-use,no-self-argument - """Return the dictionary of default values for profile settings.""" - return { - 'repository': { - 'pack_size_target': 4 * 1024 * 1024 * 1024, - 'loose_prefix_len': 2, - 'hash_type': 'sha256', - 'compression_algorithm': 'zlib+1' - } - } - def __init__(self, name: str, config: Mapping[str, Any], validate=True): """Load a profile with the profile configuration.""" if not isinstance(config, collections.abc.Mapping): @@ -84,8 +71,12 @@ def __init__(self, name: str, config: Mapping[str, Any], validate=True): from uuid import uuid4 self._attributes[self.KEY_UUID] = uuid4().hex - # Currently, whether a profile is a test profile is solely determined by its name starting with 'test_' - self._test_profile = bool(self.name.startswith('test_')) + def __str__(self) -> str: + return f'Profile<{self.uuid!r} ({self.name!r})>' + + def copy(self): + """Return a copy of the profile.""" + return self.__class__(self.name, self._attributes) @property def uuid(self) -> str: @@ -125,11 +116,21 @@ def set_storage(self, name: str, config: Dict[str, Any]) -> None: :param name: the name of the storage backend :param config: the configuration of the storage backend """ - # to-do validation (by loading the storage backend, and using a classmethod to validate the config) self._attributes.setdefault(self.KEY_STORAGE, {}) self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_BACKEND] = name 
self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_CONFIG] = config + @property + def storage_cls(self) -> Type['Backend']: + """Return the storage backend class for this profile.""" + if self.storage_backend == 'psql_dos': + from aiida.orm.implementation.sqlalchemy.backend import PsqlDosBackend + return PsqlDosBackend + if self.storage_backend == 'archive.sqlite': + from aiida.tools.archive.implementations.sqlite.backend import ArchiveReadOnlyBackend + return ArchiveReadOnlyBackend + raise ValueError(f'unknown storage backend type: {self.storage_backend}') + @property def process_control_backend(self) -> str: """Return the type of the process control backend.""" @@ -146,7 +147,6 @@ def set_process_controller(self, name: str, config: Dict[str, Any]) -> None: :param name: the name of the process backend :param config: the configuration of the process backend """ - # to-do validation (by loading the process backend, and using a classmethod to validate the config) self._attributes.setdefault(self.KEY_PROCESS, {}) self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_BACKEND] = name self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_CONFIG] = config @@ -199,23 +199,18 @@ def is_test_profile(self) -> bool: :return: boolean, True if test profile, False otherwise """ - return self._test_profile + # Currently, whether a profile is a test profile is solely determined by its name starting with 'test_' + return self.name.startswith('test_') @property def repository_path(self) -> pathlib.Path: """Return the absolute path of the repository configured for this profile. - :return: absolute filepath of the profile's file repository - """ - return pathlib.Path(self._parse_repository_uri()[1]) - - def _parse_repository_uri(self): - """ - This function validates the REPOSITORY_URI, that should be in the format protocol://address + The URI should be in the format `protocol://address` :note: At the moment, only the file protocol is supported. - :return: a tuple (protocol, address). 
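# The storage-class dispatch that `Profile.storage_cls` introduces: the backend string
# selects the implementation class. A stripped-down rendition; the real property imports
# these classes lazily, exactly as in the diff, to keep `aiida.manage` cheap to import.
def storage_cls_for(storage_backend: str):
    if storage_backend == 'psql_dos':
        from aiida.orm.implementation.sqlalchemy.backend import PsqlDosBackend
        return PsqlDosBackend
    if storage_backend == 'archive.sqlite':
        from aiida.tools.archive.implementations.sqlite.backend import ArchiveReadOnlyBackend
        return ArchiveReadOnlyBackend
    raise ValueError(f'unknown storage backend type: {storage_backend}')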
+ :return: absolute filepath of the profile's file repository """ from urllib.parse import urlparse @@ -227,7 +222,7 @@ def _parse_repository_uri(self): if not os.path.isabs(parts.path): raise exceptions.ConfigurationError('invalid repository URI: the path has to be absolute') - return parts.scheme, os.path.expanduser(parts.path) + return pathlib.Path(os.path.expanduser(parts.path)) @property def rmq_prefix(self) -> str: @@ -238,7 +233,13 @@ def rmq_prefix(self) -> str: return f'aiida-{self.uuid}' def get_rmq_url(self) -> str: + """Return the RMQ url for this profile.""" from aiida.manage.external.rmq import get_rmq_url + + if self.process_control_backend != 'rabbitmq': + raise exceptions.ConfigurationError( + f"invalid process control backend, only 'rabbitmq' is supported: {self.process_control_backend}" + ) kwargs = {key[7:]: val for key, val in self.process_control_config.items() if key.startswith('broker_')} additional_kwargs = kwargs.pop('parameters', {}) return get_rmq_url(**kwargs, **additional_kwargs) diff --git a/aiida/manage/configuration/schema/config-v5.schema.json b/aiida/manage/configuration/schema/config-v5.schema.json index 22af0f09cb..a502f2fb01 100644 --- a/aiida/manage/configuration/schema/config-v5.schema.json +++ b/aiida/manage/configuration/schema/config-v5.schema.json @@ -262,7 +262,7 @@ "django", "sqlalchemy" ], - "default": "django" + "default": "sqlalchemy" }, "AIIDADB_NAME": { "type": "string" diff --git a/aiida/manage/configuration/schema/config-v6.schema.json b/aiida/manage/configuration/schema/config-v6.schema.json index c04a9f708d..facb5af963 100644 --- a/aiida/manage/configuration/schema/config-v6.schema.json +++ b/aiida/manage/configuration/schema/config-v6.schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema", - "description": "Schema for AiiDA configuration files, format version 5", + "description": "Schema for AiiDA configuration files, format version 6", "type": "object", "definitions": { "options": { @@ -184,7 +184,7 @@ "backend": { "description": "The storage backend type to use", "type": "string", - "default": "django" + "default": "sqlalchemy" }, "config": { "description": "The configuration to pass to the storage backend", diff --git a/aiida/manage/configuration/schema/config-v7.schema.json b/aiida/manage/configuration/schema/config-v7.schema.json new file mode 100644 index 0000000000..ba1fe2abb3 --- /dev/null +++ b/aiida/manage/configuration/schema/config-v7.schema.json @@ -0,0 +1,338 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "description": "Schema for AiiDA configuration files, format version 7", + "type": "object", + "definitions": { + "options": { + "type": "object", + "properties": { + "runner.poll.interval": { + "type": "integer", + "default": 60, + "minimum": 0, + "description": "Polling interval in seconds to be used by process runners" + }, + "daemon.default_workers": { + "type": "integer", + "default": 1, + "minimum": 1, + "description": "Default number of workers to be launched by `verdi daemon start`" + }, + "daemon.timeout": { + "type": "integer", + "default": 20, + "minimum": 0, + "description": "Timeout in seconds for calls to the circus client" + }, + "daemon.worker_process_slots": { + "type": "integer", + "default": 200, + "minimum": 1, + "description": "Maximum number of concurrent process tasks that each daemon worker can handle" + }, + "db.batch_size": { + "type": "integer", + "default": 100000, + "minimum": 1, + "description": "Batch size for bulk CREATE operations in the 
database. Avoids hitting MaxAllocSize of PostgreSQL (1GB) when creating large numbers of database records in one go." + }, + "verdi.shell.auto_import": { + "type": "string", + "default": "", + "description": "Additional modules/functions/classes to be automatically loaded in `verdi shell`, split by ':'" + }, + "logging.aiida_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "REPORT", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `aiida` logger" + }, + "logging.db_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "REPORT", + "description": "Minimum level to log to the DbLog table" + }, + "logging.plumpy_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `plumpy` logger" + }, + "logging.kiwipy_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `kiwipy` logger" + }, + "logging.paramiko_loglevel": { + "key": "logging_paramiko_log_level", + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `paramiko` logger" + }, + "logging.alembic_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `alembic` logger" + }, + "logging.sqlalchemy_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `sqlalchemy` logger" + }, + "logging.circus_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "INFO", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `circus` logger" + }, + "logging.aiopika_loglevel": { + "type": "string", + "enum": ["CRITICAL", "ERROR", "WARNING", "REPORT", "INFO", "DEBUG"], + "default": "WARNING", + "description": "Minimum level to log to daemon log and the `DbLog` table for the `aio_pika` logger" + }, + "warnings.showdeprecations": { + "type": "boolean", + "default": true, + "description": "Whether to print AiiDA deprecation warnings" + }, + "warnings.development_version": { + "type": "boolean", + "default": true, + "description": "Whether to print a warning when a profile is loaded while a development version is installed", + "global_only": true + }, + "transport.task_retry_initial_interval": { + "type": "integer", + "default": 20, + "minimum": 1, + "description": "Initial time interval for the exponential backoff mechanism." + }, + "transport.task_maximum_attempts": { + "type": "integer", + "default": 5, + "minimum": 1, + "description": "Maximum number of transport task attempts before a Process is Paused." 
+ }, + "rmq.task_timeout": { + "type": "integer", + "default": 10, + "minimum": 1, + "description": "Timeout in seconds for communications with RabbitMQ" + }, + "caching.default_enabled": { + "type": "boolean", + "default": false, + "description": "Enable calculation caching by default" + }, + "caching.enabled_for": { + "description": "Calculation entry points to enable caching on", + "type": "array", + "default": [], + "items": { + "type": "string" + } + }, + "caching.disabled_for": { + "description": "Calculation entry points to disable caching on", + "type": "array", + "default": [], + "items": { + "type": "string" + } + }, + "autofill.user.email": { + "type": "string", + "global_only": true, + "description": "Default user email to use when creating new profiles." + }, + "autofill.user.first_name": { + "type": "string", + "global_only": true, + "description": "Default user first name to use when creating new profiles." + }, + "autofill.user.last_name": { + "type": "string", + "global_only": true, + "description": "Default user last name to use when creating new profiles." + }, + "autofill.user.institution": { + "type": "string", + "global_only": true, + "description": "Default user institution to use when creating new profiles." + } + } + }, + "profile": { + "type": "object", + "required": ["storage", "process_control"], + "properties": { + "PROFILE_UUID": { + "description": "The profile's unique key", + "type": "string" + }, + "storage": { + "description": "The storage configuration", + "type": "object", + "required": ["backend", "config"], + "properties": { + "backend": { + "description": "The storage backend type to use", + "type": "string", + "default": "psql_dos" + }, + "config": { + "description": "The configuration to pass to the storage backend", + "type": "object", + "properties": { + "database_engine": { + "type": "string", + "default": "postgresql_psycopg2" + }, + "database_port": { + "type": ["integer", "string"], + "minimum": 1, + "pattern": "\\d+", + "default": 5432 + }, + "database_hostname": { + "type": ["string", "null"], + "default": null + }, + "database_username": { + "type": "string" + }, + "database_password": { + "type": ["string", "null"], + "default": null + }, + "database_name": { + "type": "string" + }, + "repository_uri": { + "description": "URI to the AiiDA object store", + "type": "string" + } + } + } + } + }, + "process_control": { + "description": "The process control configuration", + "type": "object", + "required": ["backend", "config"], + "properties": { + "backend": { + "description": "The process execution backend type to use", + "type": "string", + "default": "rabbitmq" + }, + "config": { + "description": "The configuration to pass to the process execution backend", + "type": "object", + "parameters": { + "broker_protocol": { + "description": "Protocol for connecting to the RabbitMQ server", + "type": "string", + "enum": ["amqp", "amqps"], + "default": "amqp" + }, + "broker_username": { + "description": "Username for RabbitMQ authentication", + "type": "string", + "default": "guest" + }, + "broker_password": { + "description": "Password for RabbitMQ authentication", + "type": "string", + "default": "guest" + }, + "broker_host": { + "description": "Hostname of the RabbitMQ server", + "type": "string", + "default": "127.0.0.1" + }, + "broker_port": { + "description": "Port of the RabbitMQ server", + "type": "integer", + "minimum": 1, + "default": 5672 + }, + "broker_virtual_host": { + "description": "RabbitMQ virtual host to connect to", + "type": 
"string", + "default": "" + }, + "broker_parameters": { + "description": "RabbitMQ arguments that will be encoded as query parameters", + "type": "object", + "default": { + "heartbeat": 600 + }, + "properties": { + "heartbeat": { + "description": "After how many seconds the peer TCP connection should be considered unreachable", + "type": "integer", + "default": 600, + "minimum": 0 + } + } + } + } + } + } + }, + "default_user_email": { + "type": ["string", "null"], + "default": null + }, + "options": { + "description": "Profile specific options", + "$ref": "#/definitions/options" + } + } + } + }, + "required": [], + "properties": { + "CONFIG_VERSION": { + "description": "The configuration version", + "type": "object", + "required": ["CURRENT", "OLDEST_COMPATIBLE"], + "properties": { + "CURRENT": { + "description": "Version number of configuration file format", + "type": "integer", + "const": 7 + }, + "OLDEST_COMPATIBLE": { + "description": "Version number of oldest configuration file format this file is compatible with", + "type": "integer", + "const": 7 + } + } + }, + "profiles": { + "description": "Configured profiles", + "type": "object", + "patternProperties": { + ".+": { + "$ref": "#/definitions/profile" + } + } + }, + "default_profile": { + "description": "Default profile to use", + "type": "string" + }, + "options": { + "description": "Global options", + "$ref": "#/definitions/options" + } + } +} diff --git a/aiida/manage/database/integrity/sql/__init__.py b/aiida/manage/database/integrity/sql/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/aiida/manage/database/integrity/sql/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/aiida/manage/database/integrity/sql/links.py b/aiida/manage/database/integrity/sql/links.py deleted file mode 100644 index 025040654b..0000000000 --- a/aiida/manage/database/integrity/sql/links.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""SQL statements that test the integrity of the database with respect to links.""" - -from aiida.common.extendeddicts import AttributeDict -from aiida.common.links import LinkType - -VALID_LINK_TYPES = tuple(link_type.value for link_type in LinkType) - -SELECT_CALCULATIONS_WITH_OUTGOING_CALL = """ - SELECT link.id, node_in.uuid, node_out.uuid, link.type, link.label - FROM db_dbnode AS node_in - JOIN db_dblink AS link ON node_in.id = link.input_id - JOIN db_dbnode AS node_out ON node_out.id = link.output_id - WHERE node_in.node_type LIKE 'process.calculation%' - AND (link.type = 'call_calc' OR link.type = 'call_work'); - """ - -SELECT_CALCULATIONS_WITH_OUTGOING_RETURN = """ - SELECT link.id, node_in.uuid, node_out.uuid, link.type, link.label - FROM db_dbnode AS node_in - JOIN db_dblink AS link ON node_in.id = link.input_id - JOIN db_dbnode AS node_out ON node_out.id = link.output_id - WHERE node_in.node_type LIKE 'process.calculation%' - AND link.type = 'return'; - """ - -SELECT_WORKFLOWS_WITH_OUTGOING_CREATE = """ - SELECT link.id, node_in.uuid, node_out.uuid, link.type, link.label - FROM db_dbnode AS node_in - JOIN db_dblink AS link ON node_in.id = link.input_id - JOIN db_dbnode AS node_out ON node_out.id = link.output_id - WHERE node_in.node_type LIKE 'process.workflow%' - AND link.type = 'create'; - """ - -SELECT_LINKS_WITH_INVALID_TYPE = """ - SELECT link.id, node_in.uuid, node_out.uuid, link.type, link.label - FROM db_dbnode AS node_in - JOIN db_dblink AS link ON node_in.id = link.input_id - JOIN db_dbnode AS node_out ON node_out.id = link.output_id - WHERE link.type NOT IN %(valid_link_types)s; - """ - -SELECT_MULTIPLE_INCOMING_CREATE = """ - SELECT node.id, node.uuid, node.node_type, COUNT(link.id) - FROM db_dbnode AS node - JOIN db_dblink AS link - ON node.id = link.output_id - WHERE node.node_type LIKE 'data.%' - AND link.type = 'create' - GROUP BY node.id - HAVING COUNT(link.id) > 1; - """ - -SELECT_MULTIPLE_INCOMING_CALL = """ - SELECT node.id, node.uuid, node.node_type, COUNT(link.id) - FROM db_dbnode AS node - JOIN db_dblink AS link - ON node.id = link.output_id - WHERE node.node_type LIKE 'process.%' - AND (link.type = 'call_calc' OR link.type = 'call_work') - GROUP BY node.id - HAVING COUNT(link.id) > 1; - """ - -INVALID_LINK_SELECT_STATEMENTS = ( - AttributeDict({ - 'sql': SELECT_CALCULATIONS_WITH_OUTGOING_CALL, - 'parameters': None, - 'headers': ['ID', 'Input node', 'Output node', 'Type', 'Label'], - 'message': 'detected calculation nodes with outgoing `call` links' - }), - AttributeDict({ - 'sql': SELECT_CALCULATIONS_WITH_OUTGOING_RETURN, - 'parameters': None, - 'headers': ['ID', 'Input node', 'Output node', 'Type', 'Label'], - 'message': 'detected calculation nodes with outgoing `return` links' - }), - AttributeDict({ - 'sql': SELECT_WORKFLOWS_WITH_OUTGOING_CREATE, - 'parameters': None, - 'headers': ['ID', 'Input node', 'Output node', 'Type', 'Label'], - 'message': 'detected workflow nodes with outgoing `create` links' - }), - AttributeDict({ - 'sql': SELECT_LINKS_WITH_INVALID_TYPE, - 'parameters': { - 'valid_link_types': VALID_LINK_TYPES - }, - 'headers': ['ID', 'Input node', 'Output node', 'Type', 'Label'], - 'message': 'detected links with invalid type' - }), - AttributeDict({ 
- 'sql': SELECT_MULTIPLE_INCOMING_CREATE, - 'parameters': None, - 'headers': ['ID', 'UUID', 'Type', 'Count'], - 'message': 'detected nodes with more than one incoming `create` link' - }), - AttributeDict({ - 'sql': SELECT_MULTIPLE_INCOMING_CALL, - 'parameters': None, - 'headers': ['ID', 'UUID', 'Type', 'Count'], - 'message': 'detected nodes with more than one incoming `call` link' - }), -) diff --git a/aiida/manage/database/integrity/sql/nodes.py b/aiida/manage/database/integrity/sql/nodes.py deleted file mode 100644 index 6187654455..0000000000 --- a/aiida/manage/database/integrity/sql/nodes.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""SQL statements that test the integrity of the database with respect to nodes.""" - -from aiida.common.extendeddicts import AttributeDict -from aiida.orm import CalculationNode, Data, WorkflowNode - - -def format_type_string_regex(node_class): - """Format the type string regex to match nodes that are a sub class of the given node class. - - For example, for the CalculationNode class, the type string is given by:: - - node.process.calculation.CalculationNode. - - To obtain the regex string that can be used to match sub classes, one has to strip the last period and - the class name:: - - nodes.process.calculation. - - Any node with a type string that starts with this sub string is a sub class of the `CalculationNode` class. - - :param node_class: the node class for which to get the sub class regex string - :return: a string that can be used as regex to match nodes that are a sub class of the given node class - """ - # 'nodes.process.calculation.CalculationNode.' - type_string = node_class._plugin_type_string # pylint: disable=protected-access - - # ['nodes', 'process', 'calculation'] - type_parts = type_string.split('.')[:-2] - - # 'nodes.process.calculation.' - type_string_regex = f"{'.'.join(type_parts)}." - - return type_string_regex - - -VALID_NODE_BASE_CLASSES = [Data, CalculationNode, WorkflowNode] -VALID_NODE_TYPE_STRING = f"({'|'.join([format_type_string_regex(cls) for cls in VALID_NODE_BASE_CLASSES])})%" - -SELECT_NODES_WITH_INVALID_TYPE = """ - SELECT node.id, node.uuid, node.node_type - FROM db_dbnode AS node - WHERE node.node_type NOT SIMILAR TO %(valid_node_types)s; - """ - -INVALID_NODE_SELECT_STATEMENTS = ( - AttributeDict({ - 'sql': SELECT_NODES_WITH_INVALID_TYPE, - 'parameters': { - 'valid_node_types': VALID_NODE_TYPE_STRING - }, - 'headers': ['ID', 'UUID', 'Type'], - 'message': 'detected nodes with invalid type' - }), -) diff --git a/aiida/manage/database/integrity/utils.py b/aiida/manage/database/integrity/utils.py deleted file mode 100644 index 3e05df1876..0000000000 --- a/aiida/manage/database/integrity/utils.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -"""Methods to validate the database integrity and fix violations.""" -__all__ = ('write_database_integrity_violation',) - -WARNING_BORDER = '*' * 120 - - -def write_database_integrity_violation(results, headers, reason_message, action_message=None): - """Emit a integrity violation warning and write the violating records to a log file in the current directory - - :param results: a list of tuples representing the violating records - :param headers: a tuple of strings that will be used as a header for the log file. Should have the same length - as each tuple in the results list. - :param reason_message: a human readable message detailing the reason of the integrity violation - :param action_message: an optional human readable message detailing a performed action, if any - """ - # pylint: disable=duplicate-string-formatting-argument - from datetime import datetime - from tempfile import NamedTemporaryFile - - from tabulate import tabulate - - from aiida.cmdline.utils import echo - from aiida.manage import configuration - - if configuration.PROFILE.is_test_profile: - return - - if action_message is None: - action_message = 'nothing' - - with NamedTemporaryFile(prefix='migration-', suffix='.log', dir='.', delete=False, mode='w+') as handle: - echo.echo('') - echo.echo_warning( - '\n{}\nFound one or multiple records that violate the integrity of the database\nViolation reason: {}\n' - 'Performed action: {}\nViolators written to: {}\n{}\n'.format( - WARNING_BORDER, reason_message, action_message, handle.name, WARNING_BORDER - ) - ) - - handle.write(f'# {datetime.utcnow().isoformat()}\n') - handle.write(f'# Violation reason: {reason_message}\n') - handle.write(f'# Performed action: {action_message}\n') - handle.write('\n') - handle.write(tabulate(results, headers)) diff --git a/aiida/manage/manager.py b/aiida/manage/manager.py index 58e45e8522..5d3963f537 100644 --- a/aiida/manage/manager.py +++ b/aiida/manage/manager.py @@ -11,13 +11,13 @@ """AiiDA manager for global settings""" import asyncio import functools -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Optional, Union +from warnings import warn if TYPE_CHECKING: from kiwipy.rmq import RmqThreadCommunicator from plumpy.process_comms import RemoteProcessThreadController - from aiida.backends.manager import BackendManager from aiida.engine.daemon.client import DaemonClient from aiida.engine.persistence import AiiDAPersister from aiida.engine.runners import Runner @@ -25,55 +25,54 @@ from aiida.manage.configuration.profile import Profile from aiida.orm.implementation import Backend -__all__ = ('get_manager', 'reset_manager') +__all__ = ('get_manager',) +MANAGER: Optional['Manager'] = None -class Manager: - """ - Manager singleton to provide global versions of commonly used profile/settings related objects - and methods to facilitate their construction. - In AiiDA the settings of many objects are tied to options defined in the current profile. This - means that certain objects should be constructed in a way that depends on the profile. 
Instead of - having disparate parts of the code accessing the profile we put together here the profile and methods - to create objects based on the current settings. +def get_manager() -> 'Manager': + """Return the AiiDA global manager instance.""" + global MANAGER # pylint: disable=global-statement + if MANAGER is None: + MANAGER = Manager() + return MANAGER - It is also a useful place to put objects where there can be a single 'global' (per profile) instance. - Future plans: - * reset manager cache when loading a new profile +class Manager: + """Manager singleton for globally loaded resources. + + AiiDA can have the following global resources loaded: + + 1. A single configuration object that contains: + - Global options overrides + - The name of a default profile + - A mapping of profile names to their configuration and option overrides + 2. A single profile object that contains: + - The name of the profile + - The UUID of the profile + - The configuration of the profile, for connecting to storage and processing resources + - The option overrides for the profile + 3. A single storage backend object for the profile, to connect to data storage resources + 4. A single daemon client object for the profile, to connect to the AiiDA daemon + 5. A single communicator object for the profile, to connect to the process control resources + 6. A single process controller object for the profile, which uses the communicator to control process tasks + 7. A single runner object for the profile, which uses the process controller to start and stop processes + 8. A single persister object for the profile, which can persist running processes to the profile storage + """ def __init__(self) -> None: - self._backend: Optional['Backend'] = None - self._backend_manager: Optional['BackendManager'] = None - self._config: Optional['Config'] = None - self._daemon_client: Optional['DaemonClient'] = None + # note: the config currently references the global variables self._profile: Optional['Profile'] = None + self._profile_storage: Optional['Backend'] = None + self._daemon_client: Optional['DaemonClient'] = None self._communicator: Optional['RmqThreadCommunicator'] = None self._process_controller: Optional['RemoteProcessThreadController'] = None self._persister: Optional['AiiDAPersister'] = None self._runner: Optional['Runner'] = None - def close(self) -> None: - """Reset the global settings entirely and release any global objects.""" - if self._communicator is not None: - self._communicator.close() - if self._runner is not None: - self._runner.stop() - - self._backend = None - self._backend_manager = None - self._config = None - self._profile = None - self._communicator = None - self._daemon_client = None - self._process_controller = None - self._persister = None - self._runner = None - @staticmethod - def get_config() -> 'Config': + def get_config(create=False) -> 'Config': """Return the current config.
:return: current loaded config instance @@ -81,139 +80,161 @@ def get_config() -> 'Config': """ from .configuration import get_config - return get_config() + return get_config(create=create) - @staticmethod - def get_profile() -> Optional['Profile']: + def get_profile(self) -> Optional['Profile']: """Return the current loaded profile, if any :return: current loaded profile instance - """ - from .configuration import get_profile - return get_profile() + return self._profile - def unload_backend(self) -> None: - """Unload the current backend and its corresponding database environment.""" - manager = self.get_backend_manager() - manager.reset_backend_environment() - self._backend = None + def load_profile(self, profile: Union[None, str, 'Profile'] = None, allow_switch=False) -> 'Profile': + """Load a global profile, unloading any previously loaded profile. - def _load_backend(self, schema_check: bool = True, repository_check: bool = True) -> 'Backend': - """Load the backend for the currently configured profile and return it. + .. note:: if a profile is already loaded and no explicit profile is specified, nothing will be done - .. note:: this will reconstruct the `Backend` instance in `self._backend` so the preferred method to load the - backend is to call `get_backend` which will create it only when not yet instantiated. + :param profile: the name of the profile to load, by default will use the one marked as default in the config + :param allow_switch: if True, will allow switching to a different profile when storage is already loaded - :param schema_check: force a database schema check if the database environment has not yet been loaded. - :param repository_check: force a check that the database is associated with the repository that is configured - for the current profile. - :return: the database backend. + :return: the loaded `Profile` instance + :raises `aiida.common.exceptions.InvalidOperation`: + if another profile has already been loaded and allow_switch is False """ - from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA, get_backend_manager - from aiida.common import ConfigurationError, InvalidOperation + from aiida.common.exceptions import InvalidOperation from aiida.common.log import configure_logging - from aiida.manage import configuration - from aiida.manage.profile_access import ProfileAccessManager + from aiida.manage.configuration.profile import Profile - profile = self.get_profile() + if profile is None or isinstance(profile, str): + profile = self.get_config().get_profile(profile) + elif not isinstance(profile, Profile): + raise TypeError(f'profile must be None, a string, or a Profile instance, got: {type(profile)}') - if profile is None: - raise ConfigurationError( - 'Could not determine the current profile. Consider loading a profile using `aiida.load_profile()`.' 
- ) + # If a profile is loaded and the specified profile name is that of the currently loaded, do nothing + if self._profile and (self._profile.name == profile.name): + return self._profile - if configuration.BACKEND_UUID is not None and configuration.BACKEND_UUID != profile.uuid: - raise InvalidOperation('cannot load backend because backend of another profile is already loaded') + if self._profile and self.profile_storage_loaded and not allow_switch: + raise InvalidOperation( + f'cannot switch to profile {profile.name!r} because profile {self._profile.name!r} storage ' + 'is already loaded and allow_switch is False' + ) - backend_manager = get_backend_manager(profile.storage_backend) + self.unload_profile() + self._profile = profile - # Do NOT reload the backend environment if already loaded, simply reload the backend instance after - if configuration.BACKEND_UUID is None: - access_manager = ProfileAccessManager(profile) - access_manager.request_access() - backend_manager.load_backend_environment(profile, validate_schema=schema_check) - configuration.BACKEND_UUID = profile.uuid + # Reconfigure the logging to make sure that profile specific logging config options are taken into account. + # Note that we do not configure with `with_orm=True` because that will force the backend to be loaded. + # This should instead be done lazily in `Manager.get_profile_storage`. configure_logging() - backend_type = profile.storage_backend + # Check whether a development version is being run. Note that this needs to be called after ``configure_logging`` + # because this function relies on the logging being properly configured for the warning to show. check_version() - # Can only import the backend classes after the backend has been loaded - if backend_type == BACKEND_DJANGO: - from aiida.orm.implementation.django.backend import DjangoBackend - self._backend = DjangoBackend() - elif backend_type == BACKEND_SQLA: - from aiida.orm.implementation.sqlalchemy.backend import SqlaBackend - self._backend = SqlaBackend() - else: - raise ValueError(f'unknown database backend type: {backend_type}') - - # Perform the check on the repository compatibility. Since this is new functionality and the stability is not - # yet known, we issue a warning in the case the repo and database are incompatible. In the future this might - # then become an exception once we have verified that it is working reliably. - if repository_check and not profile.is_test_profile: - repository_uuid_config = self._backend.get_repository().uuid - repository_uuid_database = backend_manager.get_repository_uuid() - - from aiida.cmdline.utils import echo - if repository_uuid_config != repository_uuid_database: - echo.echo_warning( - f'the database and repository configured for profile `{profile.name}` are incompatible:\n\n' - f'Repository UUID in profile: {repository_uuid_config}\n' - f'Repository UUID in database: {repository_uuid_database}\n\n' - 'Using a database with an incompatible repository will prevent AiiDA from functioning properly.\n' - 'Please make sure that the configuration of your profile is correct.\n' - ) + return self._profile - # Reconfigure the logging with `with_orm=True` to make sure that profile specific logging configuration options - # are taken into account and the `DbLogHandler` is configured.
- configure_logging(with_orm=True) + def reset_profile(self) -> None: + """Close and reset any associated resources for the current profile.""" + if self._profile_storage is not None: + self._profile_storage.close() + if self._communicator is not None: + self._communicator.close() + if self._runner is not None: + self._runner.stop() + self._profile_storage = None + self._communicator = None + self._daemon_client = None + self._process_controller = None + self._persister = None + self._runner = None - return self._backend + def unload_profile(self) -> None: + """Unload the current profile, closing any associated resources.""" + self.reset_profile() + self._profile = None @property - def backend_loaded(self) -> bool: - """Return whether a database backend has been loaded. + def profile_storage_loaded(self) -> bool: + """Return whether a storage backend has been loaded. :return: boolean, True if database backend is currently loaded, False otherwise """ - return self._backend is not None + return self._profile_storage is not None - def get_backend_manager(self) -> 'BackendManager': - """Return the database backend manager. + def get_option(self, option_name: str) -> Any: + """Return the value of a configuration option. - .. note:: this is not the actual backend, but a manager class that is necessary for database operations that - go around the actual ORM. For example when the schema version has not yet been validated. + In order of priority, the option is returned from: - :return: the database backend manager + 1. The current profile, if it is loaded and sets the option + 2. The current configuration, if it is loaded and sets the option + 3. The default value for the option + :param option_name: the name of the option to return + :return: the value of the option + :raises `aiida.common.exceptions.ConfigurationError`: if the option is not found """ - from aiida.backends import get_backend_manager - from aiida.common import ConfigurationError + from aiida.common.exceptions import ConfigurationError + from aiida.manage.configuration.options import get_option + + # try the profile + if self._profile and option_name in self._profile.options: + return self._profile.get_option(option_name) + # try the config + try: + config = self.get_config(create=True) + except ConfigurationError: + pass + else: + if option_name in config.options: + return config.get_option(option_name) + # try the defaults (will raise ConfigurationError if not present) + option = get_option(option_name) + return option.default - if self._backend_manager is None: + def get_backend(self) -> 'Backend': + """Return the current profile's storage backend, loading it if necessary. - if self._backend is None: - self._load_backend() + Deprecated: use `get_profile_storage` instead. + """ + from aiida.common.warnings import AiidaDeprecationWarning + warn('get_backend() is deprecated, use get_profile_storage() instead', AiidaDeprecationWarning) + return self.get_profile_storage() - profile = self.get_profile() - if profile is None: - raise ConfigurationError( - 'Could not determine the current profile. Consider loading a profile using `aiida.load_profile()`.'
- ) - self._backend_manager = get_backend_manager(profile.storage_backend) + def get_profile_storage(self) -> 'Backend': + """Return the current profile's storage backend, loading it if necessary.""" + from aiida.common import ConfigurationError + from aiida.common.log import configure_logging + from aiida.manage.profile_access import ProfileAccessManager - return self._backend_manager + # if loaded, return the current storage backend (which is "synced" with the global profile) + if self._profile_storage is not None: + return self._profile_storage - def get_backend(self) -> 'Backend': - """Return the database backend + # get the currently loaded profile + profile = self.get_profile() + if profile is None: + raise ConfigurationError( + 'Could not determine the current profile. Consider loading a profile using `aiida.load_profile()`.' + ) - :return: the database backend + # request access to the profile (for example, if it is being used by a maintenance operation) + ProfileAccessManager(profile).request_access() - """ - if self._backend is None: - self._load_backend() + # retrieve the storage backend to use for the current profile + storage_cls = profile.storage_cls + + # now we can actually instantiate the backend and set the global variable, note: + # if the storage is not reachable, this will raise an exception + # if the storage schema is not at the latest version, this will raise an exception and the user will be informed to migrate + self._profile_storage = storage_cls(profile) + + # Reconfigure the logging with `with_orm=True` to make sure that profile specific logging configuration options + # are taken into account and the `DbLogHandler` is configured. configure_logging(with_orm=True) - return self._backend + return self._profile_storage def get_persister(self) -> 'AiiDAPersister': """Return the persister @@ -239,14 +260,10 @@ def get_communicator(self) -> 'RmqThreadCommunicator': return self._communicator - def create_communicator( - self, task_prefetch_count: Optional[int] = None, with_orm: bool = True - ) -> 'RmqThreadCommunicator': + def create_communicator(self, task_prefetch_count: Optional[int] = None) -> 'RmqThreadCommunicator': """Create a Communicator. :param task_prefetch_count: optionally specify how many tasks this communicator can take simultaneously - :param with_orm: if True, use ORM (de)serializers. If false, use json. - This is used by verdi status to get a communicator without needing to load the dbenv.
:return: the communicator instance @@ -255,6 +272,7 @@ def create_communicator( from aiida.common import ConfigurationError from aiida.manage.external import rmq + from aiida.orm.utils import serialize profile = self.get_profile() if profile is None: @@ -263,21 +281,14 @@ def create_communicator( ) if task_prefetch_count is None: - task_prefetch_count = self.get_config().get_option('daemon.worker_process_slots', profile.name) + task_prefetch_count = self.get_option('daemon.worker_process_slots') prefix = profile.rmq_prefix - if with_orm: - from aiida.orm.utils import serialize - encoder = functools.partial(serialize.serialize, encoding='utf-8') - decoder = serialize.deserialize_unsafe - else: - # used by verdi status to get a communicator without needing to load the dbenv - from aiida.common import json - encoder = functools.partial(json.dumps, encoding='utf-8') - decoder = json.loads + encoder = functools.partial(serialize.serialize, encoding='utf-8') + decoder = serialize.deserialize_unsafe - return kiwipy.rmq.RmqThreadCommunicator.connect( + communicator = kiwipy.rmq.RmqThreadCommunicator.connect( connection_params={'url': profile.get_rmq_url()}, message_exchange=rmq.get_message_exchange_name(prefix), encoder=encoder, @@ -285,11 +296,14 @@ def create_communicator( task_exchange=rmq.get_task_exchange_name(prefix), task_queue=rmq.get_launch_queue_name(prefix), task_prefetch_count=task_prefetch_count, - async_task_timeout=self.get_config().get_option('rmq.task_timeout', profile.name), + async_task_timeout=self.get_option('rmq.task_timeout'), # This is needed because the verdi commands will call this function and when called in unit tests the # testing_mode cannot be set. testing_mode=profile.is_test_profile, ) + # Check whether a compatible version of RabbitMQ is being used. + check_rabbitmq_version(communicator) + return communicator def get_daemon_client(self) -> 'DaemonClient': """Return the daemon client for the current profile. @@ -351,13 +365,12 @@ def create_runner(self, with_persistence: bool = True, **kwargs: Any) -> 'Runner from aiida.common import ConfigurationError from aiida.engine import runners - config = self.get_config() profile = self.get_profile() if profile is None: raise ConfigurationError( 'Could not determine the current profile. Consider loading a profile using `aiida.load_profile()`.' ) - poll_interval = 0.0 if profile.is_test_profile else config.get_option('runner.poll.interval', profile.name) + poll_interval = 0.0 if profile.is_test_profile else self.get_option('runner.poll.interval') settings = {'rmq_submit': False, 'poll_interval': poll_interval} settings.update(kwargs) @@ -403,18 +416,61 @@ def create_daemon_runner(self, loop: Optional[asyncio.AbstractEventLoop] = None) return runner -MANAGER: Optional[Manager] = None +def is_rabbitmq_version_supported(communicator: 'RmqThreadCommunicator') -> bool: + """Return whether the version of RabbitMQ configured for the current profile is supported. + Versions 3.8 and above are not compatible with AiiDA's default configuration. -def get_manager() -> Manager: - global MANAGER # pylint: disable=global-statement - if MANAGER is None: - MANAGER = Manager() - return MANAGER + :return: boolean whether the current RabbitMQ version is supported.
+ """ + from packaging.version import parse + return get_rabbitmq_version(communicator) < parse('3.8') -def reset_manager() -> None: - global MANAGER # pylint: disable=global-statement - if MANAGER is not None: - MANAGER.close() - MANAGER = None +def get_rabbitmq_version(communicator: 'RmqThreadCommunicator'): + """Return the version of the RabbitMQ server that the current profile connects to. + + :return: :class:`packaging.version.Version` + """ + from packaging.version import parse + return parse(communicator.server_properties['version'].decode('utf-8')) + + +def check_rabbitmq_version(communicator: 'RmqThreadCommunicator'): + """Check the version of RabbitMQ that is being connected to and emit warning if the version is not compatible.""" + from packaging.version import parse + + from aiida.cmdline.utils import echo + version = get_rabbitmq_version(communicator) + if version >= parse('3.8'): + echo.echo_warning(f'RabbitMQ v{version} is not supported and will cause unexpected problems!') + echo.echo_warning('It can cause long-running workflows to crash and jobs to be submitted multiple times.') + echo.echo_warning('See https://github.com/aiidateam/aiida-core/wiki/RabbitMQ-version-to-use for details.') + return version, False + return version, True + + +def check_version(): + """Check the currently installed version of ``aiida-core`` and warn if it is a post release development version. + + The ``aiida-core`` package maintains the protocol that the ``develop`` branch will use a post release version + number. This means it will always append `.post0` to the version of the latest release. This should mean that if + this protocol is maintained properly, this method will print a warning if the currently installed version is a + post release development branch and not an actual release. + """ + from packaging.version import parse + + from aiida import __version__ + from aiida.cmdline.utils import echo + from aiida.manage.configuration import get_config_option + + version = parse(__version__) + + # Showing of the warning can be turned off by setting the following option to false. 
+ show_warning = get_config_option('warnings.development_version') + + if version.is_postrelease and show_warning: + echo.echo_warning(f'You are currently using a post release development version of AiiDA: {version}') + echo.echo_warning('Be aware that this is not recommended for production and is not officially supported.') + echo.echo_warning('Databases used with this version may not be compatible with future releases of AiiDA') + echo.echo_warning('as you might not be able to automatically migrate your data.\n') diff --git a/aiida/manage/tests/main.py b/aiida/manage/tests/main.py index 32377959a8..a5c07b0cd9 100644 --- a/aiida/manage/tests/main.py +++ b/aiida/manage/tests/main.py @@ -15,10 +15,10 @@ import os import shutil import tempfile +import warnings -from aiida.backends import BACKEND_DJANGO, BACKEND_SQLA -from aiida.common import exceptions -from aiida.manage import configuration, manager +from aiida.common.warnings import AiidaDeprecationWarning +from aiida.manage import configuration, get_manager from aiida.manage.configuration.settings import create_instance_directories from aiida.manage.external.postgres import Postgres @@ -39,7 +39,7 @@ 'first_name': 'AiiDA', 'last_name': 'Plugintest', 'institution': 'aiidateam', - 'storage_backend': 'django', + 'storage_backend': 'psql_dos', 'database_engine': 'postgresql_psycopg2', 'database_username': 'aiida', 'database_password': 'aiida_pw', @@ -80,6 +80,11 @@ class TestManager: def __init__(self): self._manager = None + @property + def manager(self) -> 'ProfileManager': + assert self._manager is not None + return self._manager + def use_temporary_profile(self, backend=None, pgtest=None): """Set up Test manager to use temporary AiiDA profile. @@ -90,8 +95,8 @@ def use_temporary_profile(self, backend=None, pgtest=None): e.g. {'pg_ctl': '/somepath/pg_ctl'}. Should usually not be necessary. """ - if configuration.PROFILE is not None: - raise TestManagerError('AiiDA dbenv must not be loaded before setting up a test profile.') + if configuration.get_profile() is not None: + raise TestManagerError('An AiiDA profile must not be loaded before setting up a test profile.') if self._manager is not None: raise TestManagerError('Profile manager already loaded.') @@ -106,19 +111,23 @@ def use_profile(self, profile_name): :param profile_name: Name of existing test profile to use. 
""" - if configuration.PROFILE is not None: - raise TestManagerError('AiiDA dbenv must not be loaded before setting up a test profile.') + if configuration.get_profile() is not None: + raise TestManagerError('an AiiDA profile must not be loaded before setting up a test profile.') if self._manager is not None: raise TestManagerError('Profile manager already loaded.') self._manager = ProfileManager(profile_name=profile_name) - self._manager.init_db() def has_profile_open(self): return self._manager and self._manager.has_profile_open() - def reset_db(self, with_user=True): - return self._manager.reset_db(with_user=with_user) + def reset_db(self): + warnings.warn('reset_db() is deprecated, use clear_profile() instead', AiidaDeprecationWarning) + return self._manager.clear_profile() + + def clear_profile(self): + """Reset the global profile, clearing all its data and closing any open resources.""" + return self._manager.clear_profile() def destroy_all(self): if self._manager: @@ -141,55 +150,20 @@ def __init__(self, profile_name): from aiida.backends.testbase import check_if_tests_can_run self._profile = None - self._user = None - try: self._profile = load_profile(profile_name) - manager.get_manager()._load_backend(schema_check=False) # pylint: disable=protected-access except Exception: raise TestManagerError('Unable to load test profile \'{}\'.'.format(profile_name)) check_if_tests_can_run() - self._select_db_test_case(backend=self._profile.storage_backend) - - def _select_db_test_case(self, backend): - """ - Selects tests case for the correct database backend. - """ - if backend == BACKEND_DJANGO: - from aiida.backends.djsite.db.testbase import DjangoTests - self._test_case = DjangoTests() - elif backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy import get_scoped_session - from aiida.backends.sqlalchemy.testbase import SqlAlchemyTests - - self._test_case = SqlAlchemyTests() - self._test_case.test_session = get_scoped_session() - - def reset_db(self, with_user=True): - self._test_case.clean_db() # will drop all users - manager.reset_manager() - self.init_db(with_user=with_user) - - def init_db(self, with_user=True): - """Initialise the database state for running of tests. - - Adds default user if necessary. 
- """ - from aiida.cmdline.commands.cmd_user import set_default_user - from aiida.orm import User - - if with_user and not User.objects.get_default(): - user_dict = get_user_dict(_DEFAULT_PROFILE_INFO) - try: - user = User(**user_dict) - user.store() - except exceptions.IntegrityError: - # The user already exists, no problem - user = User.objects.get(**user_dict) - - set_default_user(self._profile, user) - User.objects.reset() # necessary to pick up new default user + @staticmethod + def clear_profile(): + """Reset the global profile, clearing all its data and closing any open resources.""" + manager = get_manager() + if manager.profile_storage_loaded: + manager.get_profile_storage()._clear(recreate_user=True) # pylint: disable=protected-access + manager.reset_profile() + manager.get_profile_storage() # reload the storage connection def has_profile_open(self): return self._profile is not None @@ -234,7 +208,7 @@ class TemporaryProfileManager(ProfileManager): # run tests 1 - tests.reset_db() + tests.clear_profile() # database ready for independent tests 2 # run tests 2 @@ -244,9 +218,7 @@ class TemporaryProfileManager(ProfileManager): """ - _test_case = None - - def __init__(self, backend=BACKEND_DJANGO, pgtest=None): # pylint: disable=super-init-not-called + def __init__(self, backend='psql_dos', pgtest=None): # pylint: disable=super-init-not-called """Construct a TemporaryProfileManager :param backend: a database backend @@ -268,7 +240,6 @@ def __init__(self, backend=BACKEND_DJANGO, pgtest=None): # pylint: disable=supe self._backup = { 'config': configuration.CONFIG, 'config_dir': settings.AIIDA_CONFIG_FOLDER, - 'profile': configuration.PROFILE, } @property @@ -321,8 +292,8 @@ def create_aiida_db(self): """ Create the necessary database on the temporary postgres instance. """ - if configuration.PROFILE is not None: - raise TestManagerError('AiiDA dbenv can not be loaded while creating a tests db environment') + if configuration.get_profile() is not None: + raise TestManagerError('An AiiDA profile can not be loaded while creating a tests db environment') if self.pg_cluster is None: self.create_db_cluster() self.postgres = Postgres(interactive=False, quiet=True, dbinfo=self.dbinfo) @@ -342,7 +313,10 @@ def create_profile(self): Warning: the AiiDA dbenv must not be loaded when this is called! 
""" - from aiida.manage.configuration import Profile, load_profile, settings + from aiida.manage.configuration import Profile, settings + from aiida.orm import User + + manager = get_manager() if not self._has_test_db: self.create_aiida_db() @@ -351,7 +325,7 @@ def create_profile(self): self.root_dir = tempfile.mkdtemp() configuration.CONFIG = None settings.AIIDA_CONFIG_FOLDER = self.config_dir - configuration.PROFILE = None + manager.unload_profile() create_instance_directories() profile_name = self.profile_info['name'] config = configuration.get_config(create=True) @@ -360,12 +334,15 @@ def create_profile(self): config.set_default_profile(profile_name).store() self._profile = profile - load_profile(profile_name) - backend = manager.get_manager()._load_backend(schema_check=False) - backend.migrate() - - self._select_db_test_case(backend=self._profile.storage_backend) - self.init_db() + # initialise the profile + profile = manager.load_profile(profile_name) + # initialize the profile storage + profile.storage_cls.migrate(profile) + # create the default user for the profile + created, user = User.objects.get_or_create(**get_user_dict(_DEFAULT_PROFILE_INFO)) + if created: + user.store() + profile.default_user_email = user.email def repo_ok(self): return bool(self.repo and os.path.isdir(os.path.dirname(self.repo))) @@ -389,7 +366,7 @@ def backend(self, backend): if self.has_profile_open(): raise TestManagerError('backend cannot be changed after setting up the environment') - valid_backends = [BACKEND_DJANGO, BACKEND_SQLA] + valid_backends = ['psql_dos'] if backend not in valid_backends: raise ValueError(f'invalid backend {backend}, must be one of {valid_backends}') self.profile_info['backend'] = backend @@ -425,14 +402,11 @@ def destroy_all(self): self.pg_cluster = None self._has_test_db = False self._profile = None - self._user = None if 'config' in self._backup: configuration.CONFIG = self._backup['config'] if 'config_dir' in self._backup: settings.AIIDA_CONFIG_FOLDER = self._backup['config_dir'] - if 'profile' in self._backup: - configuration.PROFILE = self._backup['profile'] def has_profile_open(self): return self._profile is not None @@ -442,7 +416,7 @@ def has_profile_open(self): @contextmanager -def test_manager(backend=BACKEND_DJANGO, profile_name=None, pgtest=None): +def test_manager(backend='psql_dos', profile_name=None, pgtest=None): """ Context manager for TestManager objects. Sets up temporary AiiDA environment for testing or reuses existing environment, @@ -461,7 +435,7 @@ def aiida_profile(): # everything cleaned up - :param backend: database backend, either BACKEND_SQLA or BACKEND_DJANGO + :param backend: storage backend type name :param profile_name: name of test profile to be used or None (to use temporary profile) :param pgtest: a dictionary of arguments to be passed to PGTest() for starting the postgresql cluster, e.g. {'pg_ctl': '/somepath/pg_ctl'}. Should usually not be necessary. @@ -482,14 +456,13 @@ def aiida_profile(): _GLOBAL_TEST_MANAGER.destroy_all() -def get_test_backend_name(): - """ Read name of database backend from environment variable or the specified test profile. +def get_test_backend_name() -> str: + """ Read name of storage backend from environment variable or the specified test profile. - Reads database backend ('django' or 'sqlalchemy') from 'AIIDA_TEST_BACKEND' environment variable, + Reads storage backend from 'AIIDA_TEST_BACKEND' environment variable, or the backend configured for the 'AIIDA_TEST_PROFILE'. - Defaults to django backend. 
- :returns: content of environment variable or `BACKEND_DJANGO` + :returns: name of storage backend :raises: ValueError if unknown backend name detected. :raises: ValueError if both 'AIIDA_TEST_BACKEND' and 'AIIDA_TEST_PROFILE' are set, and the two backends do not match. @@ -505,9 +478,9 @@ def get_test_backend_name(): ) backend_res = backend_profile else: - backend_res = backend_env or BACKEND_DJANGO + backend_res = backend_env or 'psql_dos' - if backend_res in (BACKEND_DJANGO, BACKEND_SQLA): + if backend_res in ('psql_dos',): return backend_res raise ValueError(f"Unknown backend '{backend_res}' read from AIIDA_TEST_BACKEND environment variable") diff --git a/aiida/manage/tests/pytest_fixtures.py b/aiida/manage/tests/pytest_fixtures.py index d3e506cfbc..de9c0757ec 100644 --- a/aiida/manage/tests/pytest_fixtures.py +++ b/aiida/manage/tests/pytest_fixtures.py @@ -12,7 +12,8 @@ Collection of pytest fixtures using the TestManager for easy testing of AiiDA plugins. * aiida_profile - * clear_database + * aiida_profile_clean + * aiida_profile_clean_class * aiida_localhost * aiida_local_code_factory @@ -20,10 +21,12 @@ import asyncio import shutil import tempfile +import warnings import pytest from aiida.common.log import AIIDA_LOGGER +from aiida.common.warnings import AiidaDeprecationWarning from aiida.manage.tests import get_test_backend_name, get_test_profile_name, test_manager @@ -48,6 +51,20 @@ def aiida_profile(): # Leaving the context manager will automatically cause the `TestManager` instance to be destroyed +@pytest.fixture(scope='function') +def aiida_profile_clean(aiida_profile): + """Provide an AiiDA test profile, with the storage reset at test function setup.""" + aiida_profile.clear_profile() + yield aiida_profile + + +@pytest.fixture(scope='class') +def aiida_profile_clean_class(aiida_profile): + """Provide an AiiDA test profile, with the storage reset at test class setup.""" + aiida_profile.clear_profile() + yield aiida_profile + + @pytest.fixture(scope='function') def clear_database(clear_database_after_test): """Alias for 'clear_database_after_test'. 
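[Editor's note: a minimal usage sketch of the new `aiida_profile_clean` fixture introduced above, for plugin test suites. The test body, node values and file names are illustrative assumptions; the sketch assumes the fixtures are registered as a pytest plugin via `pytest_plugins` in the test suite's `conftest.py`.]

    # conftest.py
    pytest_plugins = ['aiida.manage.tests.pytest_fixtures']

    # test_example.py
    from aiida import orm

    def test_store_node(aiida_profile_clean):
        """The profile storage is cleared before each test that uses this fixture."""
        node = orm.Int(5).store()
        assert orm.load_node(node.pk).value == 5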
@@ -60,21 +77,31 @@ def clear_database(clear_database_after_test): @pytest.fixture(scope='function') def clear_database_after_test(aiida_profile): """Clear the database after the test.""" + warnings.warn( + 'the clear_database_after_test fixture is deprecated, use aiida_profile_clean instead', AiidaDeprecationWarning + ) yield aiida_profile - aiida_profile.reset_db() + aiida_profile.clear_profile() @pytest.fixture(scope='function') def clear_database_before_test(aiida_profile): """Clear the database before the test.""" - aiida_profile.reset_db() + warnings.warn( + 'the clear_database_before_test fixture is deprecated, use aiida_profile_clean instead', AiidaDeprecationWarning + ) + aiida_profile.clear_profile() yield aiida_profile @pytest.fixture(scope='class') def clear_database_before_test_class(aiida_profile): """Clear the database before a test class.""" - aiida_profile.reset_db() + warnings.warn( + 'the clear_database_before_test_class fixture is deprecated, use aiida_profile_clean_class instead', + AiidaDeprecationWarning + ) + aiida_profile.clear_profile() yield diff --git a/aiida/manage/tests/unittest_classes.py b/aiida/manage/tests/unittest_classes.py index 98d7816ffd..55f58aedcd 100644 --- a/aiida/manage/tests/unittest_classes.py +++ b/aiida/manage/tests/unittest_classes.py @@ -14,7 +14,7 @@ import warnings from aiida.common.warnings import AiidaDeprecationWarning -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from .main import _GLOBAL_TEST_MANAGER, get_test_backend_name, get_test_profile_name, test_manager @@ -58,10 +58,12 @@ def setUpClass(cls): 'Please use aiida.manage.tests.unittest_classes.TestRunner to run these tests.' ) - cls.backend = get_manager().get_backend() + cls.backend = get_manager().get_profile_storage() def tearDown(self): - self.test_manager.reset_db() + manager = get_manager() + if manager.profile_storage_loaded: + manager.get_profile_storage()._clear(recreate_user=True) # pylint: disable=protected-access class TestRunner(unittest.runner.TextTestRunner): diff --git a/aiida/orm/authinfos.py b/aiida/orm/authinfos.py index 1466f2df54..d83718ba21 100644 --- a/aiida/orm/authinfos.py +++ b/aiida/orm/authinfos.py @@ -12,7 +12,7 @@ from aiida.common import exceptions from aiida.common.lang import classproperty -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.plugins import TransportFactory from .
import entities, users @@ -47,7 +47,7 @@ class AuthInfo(entities.Entity['BackendAuthInfo']): @classproperty def objects(cls: Type['AuthInfo']) -> AuthInfoCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return AuthInfoCollection.get_cached(cls, get_manager().get_backend()) + return AuthInfoCollection.get_cached(cls, get_manager().get_profile_storage()) PROPERTY_WORKDIR = 'workdir' @@ -58,7 +58,7 @@ def __init__(self, computer: 'Computer', user: 'User', backend: Optional['Backen :param user: a `User` instance :param backend: the backend to use for the instance, or use the default backend if None """ - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() model = backend.authinfos.create(computer=computer.backend_entity, user=user.backend_entity) super().__init__(model) diff --git a/aiida/orm/autogroup.py b/aiida/orm/autogroup.py index 58b4ea135e..44f42a3f77 100644 --- a/aiida/orm/autogroup.py +++ b/aiida/orm/autogroup.py @@ -9,26 +9,26 @@ ########################################################################### """Module to manage the autogrouping functionality by ``verdi run``.""" import re +from typing import List, Optional from aiida.common import exceptions, timezone from aiida.common.escaping import escape_for_sql_like, get_regex_pattern_from_sql -from aiida.orm import AutoGroup +from aiida.orm import AutoGroup, QueryBuilder from aiida.plugins.entry_point import get_entry_point_string_from_class -CURRENT_AUTOGROUP = None +class AutogroupManager: + """Class to automatically add all newly stored ``Node``s to an ``AutoGroup`` (whilst enabled). -class Autogroup: - """Class to create a new `AutoGroup` instance that will, while active, automatically contain all nodes being stored. + This class should not be instantiated directly, but rather accessed through the backend storage instance. - The autogrouping is checked by the `Node.store()` method which, if `CURRENT_AUTOGROUP is not None` the method - `Autogroup.is_to_be_grouped` is called to decide whether to put the current node being stored in the current - `AutoGroup` instance. + The auto-grouping is checked by the ``Node.store()`` method which, if ``is_to_be_grouped`` is true, + will store the node in the associated ``AutoGroup``. The exclude/include lists are lists of strings like: ``aiida.data:core.int``, ``aiida.calculation:quantumespresso.pw``, ``aiida.data:core.array.%``, ... - i.e.: a string identifying the base class, followed a colona and by the path to the class + i.e.: a string identifying the base class, followed by a colon and the path to the class as accepted by CalculationFactory/DataFactory. Each string can contain one or more wildcard characters ``%``; in this case this is used in a ``like`` comparison with the QueryBuilder. @@ -39,18 +39,49 @@ class Autogroup: If none of the two is set, everything is included. 
""" - def __init__(self): - """Initialize with defaults.""" - self._exclude = None - self._include = None + def __init__(self, backend): + """Initialize the manager for the storage backend.""" + self._backend = backend - now = timezone.now() - default_label_prefix = f"Verdi autogroup on {now.strftime('%Y-%m-%d %H:%M:%S')}" - self._group_label_prefix = default_label_prefix + self._enabled = False + self._exclude: Optional[List[str]] = None + self._include: Optional[List[str]] = None + + self._group_label_prefix = f"Verdi autogroup on {timezone.now().strftime('%Y-%m-%d %H:%M:%S')}" self._group_label = None # Actual group label, set by `get_or_create_group` + @property + def is_enabled(self) -> bool: + """Return whether auto-grouping is enabled.""" + return self._enabled + + def enable(self) -> None: + """Enable the auto-grouping.""" + self._enabled = True + + def disable(self) -> None: + """Disable the auto-grouping.""" + self._enabled = False + + def get_exclude(self) -> Optional[List[str]]: + """Return the list of classes to exclude from autogrouping. + + Returns ``None`` if no exclusion list has been set.""" + return self._exclude + + def get_include(self) -> Optional[List[str]]: + """Return the list of classes to include in the autogrouping. + + Returns ``None`` if no inclusion list has been set.""" + return self._include + + def get_group_label_prefix(self) -> str: + """Get the prefix of the label of the group. + If no group label prefix was set, it will set a default one by itself.""" + return self._group_label_prefix + @staticmethod - def validate(strings): + def validate(strings: Optional[List[str]]): """Validate the list of strings passed to set_include and set_exclude.""" if strings is None: return @@ -66,24 +97,7 @@ def validate(strings): f"'{string}' has an invalid prefix, must be among: {sorted(valid_prefixes)}" ) - def get_exclude(self): - """Return the list of classes to exclude from autogrouping. - - Returns ``None`` if no exclusion list has been set.""" - return self._exclude - - def get_include(self): - """Return the list of classes to include in the autogrouping. - - Returns ``None`` if no inclusion list has been set.""" - return self._include - - def get_group_label_prefix(self): - """Get the prefix of the label of the group. - If no group label prefix was set, it will set a default one by itself.""" - return self._group_label_prefix - - def set_exclude(self, exclude): + def set_exclude(self, exclude: Optional[List[str]]) -> None: """Set the list of classes to exclude in the autogrouping. :param exclude: a list of valid entry point strings (might contain '%' to be used as @@ -98,7 +112,7 @@ def set_exclude(self, exclude): raise exceptions.ValidationError('Cannot both specify exclude and include') self._exclude = exclude - def set_include(self, include): + def set_include(self, include: Optional[List[str]]) -> None: """Set the list of classes to include in the autogrouping. 
:param include: a list of valid entry point strings (might contain '%' to be used as @@ -113,13 +127,14 @@ def set_include(self, include): raise exceptions.ValidationError('Cannot both specify exclude and include') self._include = include - def set_group_label_prefix(self, label_prefix): - """ - Set the label of the group to be created - """ + def set_group_label_prefix(self, label_prefix: Optional[str]) -> None: + """Set the label of the group to be created (or use a default).""" + if label_prefix is None: + label_prefix = f"Verdi autogroup on {timezone.now().strftime('%Y-%m-%d %H:%M:%S')}" if not isinstance(label_prefix, str): raise exceptions.ValidationError('group label must be a string') self._group_label_prefix = label_prefix + self._group_label = None # reset the actual group label @staticmethod def _matches(string, filter_string): @@ -127,7 +142,7 @@ def _matches(string, filter_string): If 'filter_string' does not contain any % sign, perform an exact match. Otherwise, match with a SQL-like query, where % means any character sequence, - and _ means a single character (these caracters can be escaped with a backslash). + and _ means a single character (these characters can be escaped with a backslash). :param string: the string to match. :param filter_string: the filter string. @@ -137,12 +152,10 @@ def _matches(string, filter_string): return re.match(regex_filter, string) is not None return string == filter_string - def is_to_be_grouped(self, node): - """ - Return whether the given node has to be included in the autogroup according to include/exclude list - - :return (bool): True if ``node`` is to be included in the autogroup - """ + def is_to_be_grouped(self, node) -> bool: + """Return whether the given node is to be auto-grouped according to enable state and include/exclude lists.""" + if not self._enabled: + return False # strings, including possibly 'all' include = self.get_include() exclude = self.get_exclude() @@ -165,14 +178,7 @@ def is_to_be_grouped(self, node): # soon as any of the filters matches) return not any(self._matches(entry_point_string, filter_string) for filter_string in exclude) - def clear_group_cache(self): - """Clear the cache of the group name. - - This is mostly used by tests when they reset the database. - """ - self._group_label = None - - def get_or_create_group(self): + def get_or_create_group(self) -> AutoGroup: """Return the current `AutoGroup`, or create one if None has been set yet. This function implements a somewhat complex logic that is however needed @@ -186,15 +192,13 @@ def get_or_create_group(self): trying to create a group with a different label (with a numeric suffix appended), until it manages to create it. """ - from aiida.orm import QueryBuilder - # When this function is called, if it is the first time, just generate # a new group name (later on, after this ``if`` block`). # In that case, we will later cache in ``self._group_label`` the group label, # So the group with the same name can be returned quickly in future # calls of this method. 
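[Reviewer note] As a standalone illustration of the matching rule documented in ``_matches`` above, a small sketch; it reuses the ``get_regex_pattern_from_sql`` helper this module already imports, while the ``matches`` function itself is illustrative, not part of the diff:

    import re

    from aiida.common.escaping import get_regex_pattern_from_sql

    def matches(string: str, filter_string: str) -> bool:
        """Exact comparison, unless the filter contains SQL-LIKE wildcards ('%' or '_')."""
        if '%' in filter_string or '_' in filter_string:
            return re.match(get_regex_pattern_from_sql(filter_string), string) is not None
        return string == filter_string

    print(matches('aiida.data:core.array.bands', 'aiida.data:core.array.%'))  # True
    print(matches('aiida.data:core.int', 'aiida.data:core.array.%'))          # False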
if self._group_label is not None: - builder = QueryBuilder().append(AutoGroup, filters={'label': self._group_label}) + builder = QueryBuilder(backend=self._backend).append(AutoGroup, filters={'label': self._group_label}) results = [res[0] for res in builder.iterall()] if results: # If it is not empty, it should have only one result due to the uniqueness constraints @@ -207,7 +211,7 @@ def get_or_create_group(self): label_prefix = self.get_group_label_prefix() # Try to do a preliminary QB query to avoid to do too many try/except # if many of the prefix_NUMBER groups already exist - queryb = QueryBuilder().append( + queryb = QueryBuilder(self._backend).append( AutoGroup, filters={ 'or': [{ @@ -243,7 +247,7 @@ def get_or_create_group(self): while True: try: label = label_prefix if counter == 0 else f'{label_prefix}_{counter}' - group = AutoGroup(label=label).store() + group = AutoGroup(backend=self._backend, label=label).store() self._group_label = group.label except exceptions.IntegrityError: counter += 1 diff --git a/aiida/orm/comments.py b/aiida/orm/comments.py index de7b74698d..a8e3a99ea2 100644 --- a/aiida/orm/comments.py +++ b/aiida/orm/comments.py @@ -12,7 +12,7 @@ from typing import TYPE_CHECKING, List, Optional, Type from aiida.common.lang import classproperty -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from . import entities, users @@ -70,7 +70,7 @@ class Comment(entities.Entity['BackendComment']): @classproperty def objects(cls: Type['Comment']) -> CommentCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return CommentCollection.get_cached(cls, get_manager().get_backend()) + return CommentCollection.get_cached(cls, get_manager().get_profile_storage()) def __init__(self, node: 'Node', user: 'User', content: Optional[str] = None, backend: Optional['Backend'] = None): """Create a Comment for a given node and user @@ -82,7 +82,7 @@ def __init__(self, node: 'Node', user: 'User', content: Optional[str] = None, ba :return: a Comment object associated to the given node and user """ - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() model = backend.comments.create(node=node.backend_entity, user=user.backend_entity, content=content) super().__init__(model) diff --git a/aiida/orm/computers.py b/aiida/orm/computers.py index 5a8b3b0c5d..2a2361aaa8 100644 --- a/aiida/orm/computers.py +++ b/aiida/orm/computers.py @@ -14,7 +14,7 @@ from aiida.common import exceptions from aiida.common.lang import classproperty -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.plugins import SchedulerFactory, TransportFactory from . 
import entities, users @@ -79,7 +79,7 @@ class Computer(entities.Entity['BackendComputer']): @classproperty def objects(cls: Type['Computer']) -> ComputerCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return ComputerCollection.get_cached(cls, get_manager().get_backend()) + return ComputerCollection.get_cached(cls, get_manager().get_profile_storage()) def __init__( # pylint: disable=too-many-arguments self, @@ -92,7 +92,7 @@ def __init__( # pylint: disable=too-many-arguments backend: Optional['Backend'] = None, ) -> None: """Construct a new computer.""" - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() model = backend.computers.create( label=label, hostname=hostname, diff --git a/aiida/orm/entities.py b/aiida/orm/entities.py index 527ad7d89d..f2c50c3754 100644 --- a/aiida/orm/entities.py +++ b/aiida/orm/entities.py @@ -18,7 +18,7 @@ from aiida.common import exceptions from aiida.common.lang import classproperty, type_check -from aiida.manage.manager import get_manager +from aiida.manage import get_manager try: from typing import Protocol @@ -79,7 +79,7 @@ def __init__(self, entity_class: Type[EntityType], backend: Optional['Backend'] from aiida.orm.implementation import Backend type_check(backend, Backend, allow_none=True) assert issubclass(entity_class, self._entity_base_cls()) - self._backend = backend or get_manager().get_backend() + self._backend = backend or get_manager().get_profile_storage() self._entity_type = entity_class def __call__(self: CollectionType, backend: 'Backend') -> CollectionType: diff --git a/aiida/orm/groups.py b/aiida/orm/groups.py index e19888cbcc..33d52dfb87 100644 --- a/aiida/orm/groups.py +++ b/aiida/orm/groups.py @@ -14,7 +14,7 @@ from aiida.common import exceptions from aiida.common.lang import classproperty, type_check -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from . 
import convert, entities, users @@ -119,7 +119,7 @@ class Group(entities.Entity['BackendGroup'], entities.EntityExtrasMixin, metacla @classproperty def objects(cls: Type['Group']) -> GroupCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return GroupCollection.get_cached(cls, get_manager().get_backend()) + return GroupCollection.get_cached(cls, get_manager().get_profile_storage()) def __init__( self, @@ -143,7 +143,7 @@ def __init__( if not label: raise ValueError('Group label must be provided') - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() user = user or users.User.objects(backend).get_default() type_check(user, users.User) type_string = self._type_string @@ -320,7 +320,7 @@ def is_user_defined(self) -> bool: class AutoGroup(Group): - """Group to be used to contain selected nodes generated while `aiida.orm.autogroup.CURRENT_AUTOGROUP` is set.""" + """Group to be used to contain selected nodes generated while auto-grouping is enabled.""" class ImportGroup(Group): diff --git a/aiida/orm/implementation/backends.py b/aiida/orm/implementation/backends.py index c1ffb8b0b1..cf99ea416f 100644 --- a/aiida/orm/implementation/backends.py +++ b/aiida/orm/implementation/backends.py @@ -9,11 +9,11 @@ ########################################################################### """Generic backend related objects""" import abc -from typing import TYPE_CHECKING, Any, ContextManager, List, Sequence, TypeVar +from typing import TYPE_CHECKING, Any, ContextManager, List, Optional, Sequence, TypeVar, Union if TYPE_CHECKING: - from sqlalchemy.orm.session import Session - + from aiida.manage.configuration.profile import Profile + from aiida.orm.autogroup import AutogroupManager from aiida.orm.entities import EntityTypes from aiida.orm.implementation import ( BackendAuthInfoCollection, @@ -32,21 +32,99 @@ TransactionType = TypeVar('TransactionType') -class Backend(abc.ABC): +class Backend(abc.ABC): # pylint: disable=too-many-public-methods """Abstraction for a backend to read/write persistent data for a profile's provenance graph. AiiDA splits data storage into two sources: - Searchable data, which is stored in the database and can be queried using the QueryBuilder - - Non-searchable data, which is stored in the repository and can be loaded using the RepositoryBackend + - Non-searchable (binary) data, which is stored in the repository and can be loaded using the RepositoryBackend The two sources are inter-linked by the ``Node.repository_metadata``. Once stored, the leaf values of this dictionary must be valid pointers to object keys in the repository. + + The class methods ``version_profile`` and ``migrate`` + should be callable for existing storage at any supported schema version (or for empty storage). + But an instance of this class should be created only for the latest schema version. """ + @classmethod + @abc.abstractmethod + def version_head(cls) -> str: + """Return the head schema version of this storage backend type.""" + + @classmethod + @abc.abstractmethod + def version_profile(cls, profile: 'Profile') -> Optional[str]: + """Return the schema version of the given profile's storage, or None for empty/uninitialised storage. + + :raises: `~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed + """ + + @classmethod + @abc.abstractmethod + def migrate(cls, profile: 'Profile') -> None: + """Migrate the storage of a profile to the latest schema version.
+ + If the schema version is already the latest version, this method does nothing. + If the storage is empty/uninitialised, then it will be initialised at head. + + :raises: `~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed + """ + + @abc.abstractmethod + def __init__(self, profile: 'Profile') -> None: + """Initialize the backend for this profile. + + :raises: `~aiida.common.exceptions.UnreachableStorage` if the storage cannot be accessed + :raises: `~aiida.common.exceptions.IncompatibleStorageSchema` + if the profile's storage schema is not at the latest version (and thus should be migrated) + :raises: :class:`aiida.common.exceptions.CorruptStorage` if the storage is internally inconsistent + """ + from aiida.orm.autogroup import AutogroupManager + self._profile = profile + self._autogroup = AutogroupManager(self) + @abc.abstractmethod - def migrate(self) -> None: - """Migrate the database to the latest schema generation or version.""" + def __str__(self) -> str: + """Return a string showing connection details for this instance.""" + + @property + def profile(self) -> 'Profile': + """Return the profile for this backend.""" + return self._profile + + @property + def autogroup(self) -> 'AutogroupManager': + """Return the autogroup manager for this backend.""" + return self._autogroup + + def version(self) -> str: + """Return the schema version of the profile's storage.""" + version = self.version_profile(self.profile) + assert version is not None + return version + + @abc.abstractmethod + def close(self): + """Close the storage access.""" + + @property + @abc.abstractmethod + def is_closed(self) -> bool: + """Return whether the storage is closed.""" + + @abc.abstractmethod + def _clear(self, recreate_user: bool = True) -> None: + """Clear the storage, removing all data. + + .. warning:: This is a destructive operation, and should only be used for testing purposes. + + :param recreate_user: Re-create the default `User` for the profile, after clearing the storage. + """ + from aiida.orm.autogroup import AutogroupManager + self._autogroup = AutogroupManager(self) @property @abc.abstractmethod @@ -87,13 +165,6 @@ def users(self) -> 'BackendUserCollection': def query(self) -> 'BackendQueryBuilder': """Return an instance of a query builder implementation for this backend""" - @abc.abstractmethod - def get_session(self) -> 'Session': - """Return a database session that can be used by the `QueryBuilder` to perform its query. - - :return: an instance of :class:`sqlalchemy.orm.session.Session` - """ - @abc.abstractmethod def transaction(self) -> ContextManager[Any]: """ @@ -149,3 +220,26 @@ def delete_nodes_and_connections(self, pks_to_delete: Sequence[int]): @abc.abstractmethod def get_repository(self) -> 'AbstractRepositoryBackend': """Return the object repository configured for this backend.""" + + @abc.abstractmethod + def set_global_variable( + self, key: str, value: Union[None, str, int, float], description: Optional[str] = None, overwrite=True + ) -> None: + """Set a global variable in the storage. + + :param key: the key of the setting + :param value: the value of the setting + :param description: the description of the setting (optional) + :param overwrite: if True, overwrite the setting if it already exists + + :raises: `ValueError` if the key already exists and `overwrite` is False + """ + + @abc.abstractmethod + def get_global_variable(self, key: str) -> Union[None, str, int, float]: + """Return a global variable from the storage.
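[Reviewer note] Taken together, the abstract API above implies a simple lifecycle: check the profile's schema version, migrate if it is behind head, and only then instantiate the storage class. A hedged sketch; ``ensure_at_head``, ``SomeStorageBackend`` and the ``'example|counter'`` key are illustrative, not part of this diff:

    def ensure_at_head(storage_cls, profile):
        """Return a storage instance for ``profile``, migrating first if its schema is behind head."""
        if storage_cls.version_profile(profile) != storage_cls.version_head():
            storage_cls.migrate(profile)  # no-op if already at head; initialises empty storage
        return storage_cls(profile)  # per the class docstring, instantiate only at the head version

    # storage = ensure_at_head(SomeStorageBackend, profile)
    # storage.set_global_variable('example|counter', 1, description='demo', overwrite=True)
    # assert storage.get_global_variable('example|counter') == 1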
+ + :param key: the key of the setting + + :raises: `KeyError` if the setting does not exist + """ diff --git a/aiida/orm/implementation/django/authinfos.py b/aiida/orm/implementation/django/authinfos.py deleted file mode 100644 index 3311e29f7e..0000000000 --- a/aiida/orm/implementation/django/authinfos.py +++ /dev/null @@ -1,135 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Module for the Django backend implementation of the `AuthInfo` ORM class.""" - -from aiida.backends.djsite.db.models import DbAuthInfo -from aiida.common import exceptions -from aiida.common.lang import type_check - -from . import entities, utils -from ..authinfos import BackendAuthInfo, BackendAuthInfoCollection - - -class DjangoAuthInfo(entities.DjangoModelEntity[DbAuthInfo], BackendAuthInfo): - """Django backend implementation for the `AuthInfo` ORM class.""" - - MODEL_CLASS = DbAuthInfo - - def __init__(self, backend, computer, user): - """Construct a new instance. - - :param computer: a :class:`aiida.orm.implementation.computers.BackendComputer` instance - :param user: a :class:`aiida.orm.implementation.users.BackendUser` instance - :return: an :class:`aiida.orm.implementation.authinfos.BackendAuthInfo` instance - """ - from . import computers, users - super().__init__(backend) - type_check(user, users.DjangoUser) - type_check(computer, computers.DjangoComputer) - self._dbmodel = utils.ModelWrapper(DbAuthInfo(dbcomputer=computer.dbmodel, aiidauser=user.dbmodel)) - - @property - def id(self): # pylint: disable=invalid-name - return self._dbmodel.id - - @property - def is_stored(self): - """Return whether the entity is stored. - - :return: True if stored, False otherwise - :rtype: bool - """ - return self._dbmodel.is_saved() - - def store(self): - """Store and return the instance. - - :return: :class:`aiida.orm.implementation.authinfos.BackendAuthInfo` - """ - self._dbmodel.save() - return self - - @property - def enabled(self): - """Return whether this instance is enabled. - - :return: boolean, True if enabled, False otherwise - """ - return self._dbmodel.enabled - - @enabled.setter - def enabled(self, enabled): - """Set the enabled state - - :param enabled: boolean, True to enable the instance, False to disable it - """ - self._dbmodel.enabled = enabled - - @property - def computer(self): - """Return the computer associated with this instance. - - :return: :class:`aiida.orm.implementation.computers.BackendComputer` - """ - return self.backend.computers.from_dbmodel(self._dbmodel.dbcomputer) - - @property - def user(self): - """Return the user associated with this instance. 
- - :return: :class:`aiida.orm.implementation.users.BackendUser` - """ - return self._backend.users.from_dbmodel(self._dbmodel.aiidauser) - - def get_auth_params(self): - """Return the dictionary of authentication parameters - - :return: a dictionary with authentication parameters - """ - return self._dbmodel.auth_params - - def set_auth_params(self, auth_params): - """Set the dictionary of authentication parameters - - :param auth_params: a dictionary with authentication parameters - """ - self._dbmodel.auth_params = auth_params - - def get_metadata(self): - """Return the dictionary of metadata - - :return: a dictionary with metadata - """ - return self._dbmodel.metadata - - def set_metadata(self, metadata): - """Set the dictionary of metadata - - :param metadata: a dictionary with metadata - """ - self._dbmodel.metadata = metadata - - -class DjangoAuthInfoCollection(BackendAuthInfoCollection): - """The collection of Django backend `AuthInfo` entries.""" - - ENTITY_CLASS = DjangoAuthInfo - - def delete(self, pk): - """Delete an entry from the collection. - - :param pk: the pk of the entry to delete - """ - # pylint: disable=import-error,no-name-in-module - from django.core.exceptions import ObjectDoesNotExist - try: - DbAuthInfo.objects.get(pk=pk).delete() - except ObjectDoesNotExist: - raise exceptions.NotExistent(f'AuthInfo<{pk}> does not exist') diff --git a/aiida/orm/implementation/django/backend.py b/aiida/orm/implementation/django/backend.py deleted file mode 100644 index 915000c170..0000000000 --- a/aiida/orm/implementation/django/backend.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django implementation of `aiida.orm.implementation.backends.Backend`.""" -from contextlib import contextmanager -import functools -from typing import Any, ContextManager, List, Sequence - -# pylint: disable=import-error,no-name-in-module -from django.apps import apps -from django.db import models -from django.db import transaction as django_transaction - -from aiida.backends.djsite.db import models as dbm -from aiida.backends.djsite.manager import DjangoBackendManager -from aiida.common.exceptions import IntegrityError -from aiida.orm.entities import EntityTypes - -from . 
import authinfos, comments, computers, convert, groups, logs, nodes, querybuilder, users -from ..sql.backends import SqlBackend - -__all__ = ('DjangoBackend',) - - -class DjangoBackend(SqlBackend[models.Model]): - """Django implementation of `aiida.orm.implementation.backends.Backend`.""" - - def __init__(self): - """Construct the backend instance by initializing all the collections.""" - self._authinfos = authinfos.DjangoAuthInfoCollection(self) - self._comments = comments.DjangoCommentCollection(self) - self._computers = computers.DjangoComputerCollection(self) - self._groups = groups.DjangoGroupCollection(self) - self._logs = logs.DjangoLogCollection(self) - self._nodes = nodes.DjangoNodeCollection(self) - self._backend_manager = DjangoBackendManager() - self._users = users.DjangoUserCollection(self) - - def migrate(self): - self._backend_manager.migrate() - - @property - def authinfos(self): - return self._authinfos - - @property - def comments(self): - return self._comments - - @property - def computers(self): - return self._computers - - @property - def groups(self): - return self._groups - - @property - def logs(self): - return self._logs - - @property - def nodes(self): - return self._nodes - - def query(self): - return querybuilder.DjangoQueryBuilder(self) - - @property - def users(self): - return self._users - - @staticmethod - def get_session(): - """Return a database session that can be used by the `QueryBuilder` to perform its query. - - If there is an exception within the context then the changes will be rolled back and the state will - be as before entering. Transactions can be nested. - - :return: an instance of :class:`sqlalchemy.orm.session.Session` - """ - from aiida.backends.djsite import get_scoped_session - return get_scoped_session() - - @staticmethod - def transaction() -> ContextManager[Any]: - """Open a transaction to be used as a context manager.""" - return django_transaction.atomic() - - @property - def in_transaction(self) -> bool: - return not django_transaction.get_autocommit() - - @staticmethod - @functools.lru_cache(maxsize=18) - def _get_model_from_entity(entity_type: EntityTypes, with_pk: bool): - """Return the Django model and fields corresponding to the given entity. 
- - :param with_pk: if True, the fields returned will include the primary key - """ - from sqlalchemy import inspect - - model = { - EntityTypes.AUTHINFO: dbm.DbAuthInfo, - EntityTypes.COMMENT: dbm.DbComment, - EntityTypes.COMPUTER: dbm.DbComputer, - EntityTypes.GROUP: dbm.DbGroup, - EntityTypes.LOG: dbm.DbLog, - EntityTypes.NODE: dbm.DbNode, - EntityTypes.USER: dbm.DbUser, - EntityTypes.LINK: dbm.DbLink, - EntityTypes.GROUP_NODE: - {model._meta.db_table: model for model in apps.get_models(include_auto_created=True)}['db_dbgroup_dbnodes'] - }[entity_type] - mapper = inspect(model.sa).mapper # here aldjemy provides us the SQLAlchemy model - keys = {key for key, col in mapper.c.items() if with_pk or col not in mapper.primary_key} - return model, keys - - def bulk_insert(self, entity_type: EntityTypes, rows: List[dict], allow_defaults: bool = False) -> List[int]: - model, keys = self._get_model_from_entity(entity_type, False) - if allow_defaults: - for row in rows: - if not keys.issuperset(row): - raise IntegrityError(f'Incorrect fields given for {entity_type}: {set(row)} not subset of {keys}') - else: - for row in rows: - if set(row) != keys: - raise IntegrityError(f'Incorrect fields given for {entity_type}: {set(row)} != {keys}') - objects = [model(**row) for row in rows] - # if there is an mtime field, disable the automatic update, so as not to change it - if entity_type in (EntityTypes.NODE, EntityTypes.COMMENT): - with dbm.suppress_auto_now([(model, ['mtime'])]): - model.objects.bulk_create(objects) - else: - model.objects.bulk_create(objects) - return [obj.id for obj in objects] - - def bulk_update(self, entity_type: EntityTypes, rows: List[dict]) -> None: - model, keys = self._get_model_from_entity(entity_type, True) - id_entries = {} - fields = None - for row in rows: - if not keys.issuperset(row): - raise IntegrityError(f'Incorrect fields given for {entity_type}: {set(row)} not subset of {keys}') - try: - id_entries[row['id']] = {k: v for k, v in row.items() if k != 'id'} - fields = fields or list(id_entries[row['id']]) - assert fields == list(id_entries[row['id']]) - except KeyError: - raise IntegrityError(f"'id' field not given for {entity_type}: {set(row)}") - except AssertionError: - # this is handled in sqlalchemy, but would require more complex logic here - raise NotImplementedError(f'Cannot bulk update {entity_type} with different fields') - if fields is None: - return - objects = [] - for pk, obj in model.objects.in_bulk(list(id_entries), field_name='id').items(): - for name, value in id_entries[pk].items(): - setattr(obj, name, value) - objects.append(obj) - model.objects.bulk_update(objects, fields) - - def delete_nodes_and_connections(self, pks_to_delete: Sequence[int]) -> None: - if not self.in_transaction: - raise AssertionError('Cannot delete nodes and links outside a transaction') - # Delete all links pointing to or from a given node - dbm.DbLink.objects.filter(models.Q(input__in=pks_to_delete) | models.Q(output__in=pks_to_delete)).delete() - # now delete nodes - dbm.DbNode.objects.filter(pk__in=pks_to_delete).delete() - - # Below are abstract methods inherited from `aiida.orm.implementation.sql.backends.SqlBackend` - - def get_backend_entity(self, model): - """Return a `BackendEntity` instance from a `DbModel` instance.""" - return convert.get_backend_entity(model, self) - - @contextmanager - def cursor(self): - """Return a psycopg cursor to be used in a context manager. 
- - :return: a psycopg cursor - :rtype: :class:`psycopg2.extensions.cursor` - """ - try: - yield self._get_connection().cursor() - finally: - pass - - def execute_raw(self, query): - """Execute a raw SQL statement and return the result. - - :param query: a string containing a raw SQL statement - :return: the result of the query - """ - with self.cursor() as cursor: - cursor.execute(query) - results = cursor.fetchall() - - return results - - @staticmethod - def _get_connection(): - """ - Get the Django connection - - :return: the django connection - """ - # pylint: disable=import-error,no-name-in-module - from django.db import connection - - # For now we just return the global but if we ever support multiple Django backends - # being loaded this should be specific to this backend - return connection diff --git a/aiida/orm/implementation/django/comments.py b/aiida/orm/implementation/django/comments.py deleted file mode 100644 index 53bc2cd4eb..0000000000 --- a/aiida/orm/implementation/django/comments.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django implementations for the Comment entity and collection.""" -# pylint: disable=import-error,no-name-in-module -import contextlib -from datetime import datetime - -from django.core.exceptions import ObjectDoesNotExist - -from aiida.backends.djsite.db import models -from aiida.common import exceptions, lang - -from . import entities, users -from ..comments import BackendComment, BackendCommentCollection -from .utils import ModelWrapper - - -class DjangoComment(entities.DjangoModelEntity[models.DbComment], BackendComment): - """Comment implementation for Django.""" - - MODEL_CLASS = models.DbComment - _auto_flush = ('mtime',) - - # pylint: disable=too-many-arguments - def __init__(self, backend, node, user, content=None, ctime=None, mtime=None): - """ - Construct a DjangoComment. 
- - :param node: a Node instance - :param user: a User instance - :param content: the comment content - :param ctime: The creation time as datetime object - :param mtime: The modification time as datetime object - :return: a Comment object associated to the given node and user - """ - super().__init__(backend) - lang.type_check(user, users.DjangoUser) # pylint: disable=no-member - - arguments = { - 'dbnode': node.dbmodel, - 'user': user.dbmodel, - 'content': content, - } - - if ctime: - lang.type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') - arguments['ctime'] = ctime - - if mtime: - lang.type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') - arguments['mtime'] = mtime - - self._dbmodel = ModelWrapper(models.DbComment(**arguments), auto_flush=self._auto_flush) - - def store(self): - """Can only store if both the node and user are stored as well.""" - from aiida.backends.djsite.db.models import suppress_auto_now - - if self._dbmodel.dbnode.id is None or self._dbmodel.user.id is None: - raise exceptions.ModificationNotAllowed('The corresponding node and/or user are not stored') - - with suppress_auto_now([(models.DbComment, ['mtime'])]) if self.mtime else contextlib.nullcontext(): - super().store() - - @property - def uuid(self) -> str: - return str(self._dbmodel.uuid) - - @property - def ctime(self): - return self._dbmodel.ctime - - @property - def mtime(self): - return self._dbmodel.mtime - - def set_mtime(self, value): - self._dbmodel.mtime = value - - @property - def node(self): - return self._backend.nodes.from_dbmodel(self._dbmodel.dbnode) - - @property - def user(self): - return self._backend.users.from_dbmodel(self._dbmodel.user) - - def set_user(self, value): - self._dbmodel.user = value - - @property - def content(self): - return self._dbmodel.content - - def set_content(self, value): - self._dbmodel.content = value - - -class DjangoCommentCollection(BackendCommentCollection): - """Django implementation for the CommentCollection.""" - - ENTITY_CLASS = DjangoComment - - def create(self, node, user, content=None, **kwargs): - """ - Create a Comment for a given node and user - - :param node: a Node instance - :param user: a User instance - :param content: the comment content - :return: a Comment object associated to the given node and user - """ - return DjangoComment(self.backend, node, user, content, **kwargs) # pylint: disable=abstract-class-instantiated - - def delete(self, comment_id): - """ - Remove a Comment from the collection with the given id - - :param comment_id: the id of the comment to delete - :type comment_id: int - - :raises TypeError: if ``comment_id`` is not an `int` - :raises `~aiida.common.exceptions.NotExistent`: if Comment with ID ``comment_id`` is not found - """ - if not isinstance(comment_id, int): - raise TypeError('comment_id must be an int') - - try: - models.DbComment.objects.get(id=comment_id).delete() - except ObjectDoesNotExist: - raise exceptions.NotExistent(f"Comment with id '{comment_id}' not found") - - def delete_all(self): - """ - Delete all Comment entries. - - :raises `~aiida.common.exceptions.IntegrityError`: if all Comments could not be deleted - """ - from django.db import transaction - try: - with transaction.atomic(): - models.DbComment.objects.all().delete() - except Exception as exc: - raise exceptions.IntegrityError(f'Could not delete all Comments. 
Full exception: {exc}') - - def delete_many(self, filters): - """ - Delete Comments based on ``filters`` - - :param filters: similar to QueryBuilder filter - :type filters: dict - - :return: (former) ``PK`` s of deleted Comments - :rtype: list - - :raises TypeError: if ``filters`` is not a `dict` - :raises `~aiida.common.exceptions.ValidationError`: if ``filters`` is empty - """ - from aiida.orm import Comment, QueryBuilder - - # Checks - if not isinstance(filters, dict): - raise TypeError('filters must be a dictionary') - if not filters: - raise exceptions.ValidationError('filters must not be empty') - - # Apply filter and delete found entities - builder = QueryBuilder(backend=self.backend).append(Comment, filters=filters, project='id').all() - entities_to_delete = [_[0] for _ in builder] - for entity in entities_to_delete: - self.delete(entity) - - # Return list of deleted entities' (former) PKs for checking - return entities_to_delete diff --git a/aiida/orm/implementation/django/computers.py b/aiida/orm/implementation/django/computers.py deleted file mode 100644 index 8dae5ea249..0000000000 --- a/aiida/orm/implementation/django/computers.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django implementations for the `Computer` entity and collection.""" - -# pylint: disable=import-error,no-name-in-module -from django.db import IntegrityError, transaction - -from aiida.backends.djsite.db import models -from aiida.common import exceptions - -from . 
import entities, utils -from ..computers import BackendComputer, BackendComputerCollection - - -class DjangoComputer(entities.DjangoModelEntity[models.DbComputer], BackendComputer): - """Django implementation for `BackendComputer`.""" - - # pylint: disable=too-many-public-methods - - MODEL_CLASS = models.DbComputer - - def __init__(self, backend, **kwargs): - """Construct a new `DjangoComputer` instance.""" - super().__init__(backend) - self._dbmodel = utils.ModelWrapper(models.DbComputer(**kwargs)) - - @property - def uuid(self): - return str(self._dbmodel.uuid) - - def copy(self): - """Create an unstored clone of an already stored `Computer`.""" - if not self.is_stored: - raise exceptions.InvalidOperation('You can copy a computer only after having stored it') - dbomputer = models.DbComputer.objects.get(pk=self.pk) - dbomputer.pk = None - - newobject = self.__class__.from_dbmodel(dbomputer) # pylint: disable=no-value-for-parameter - - return newobject - - def store(self): - """Store the `Computer` instance.""" - # As a first thing, I check if the data is valid - sid = transaction.savepoint() - try: - # transactions are needed here for Postgresql: - # https://docs.djangoproject.com/en/1.5/topics/db/transactions/#handling-exceptions-within-postgresql-transactions - self._dbmodel.save() - transaction.savepoint_commit(sid) - except IntegrityError: - transaction.savepoint_rollback(sid) - raise ValueError('Integrity error, probably the hostname already exists in the database') - - return self - - @property - def is_stored(self): - return self._dbmodel.id is not None - - @property - def label(self): - return self._dbmodel.label - - @property - def description(self): - return self._dbmodel.description - - @property - def hostname(self): - return self._dbmodel.hostname - - def get_metadata(self): - return self._dbmodel.metadata - - def set_metadata(self, metadata): - self._dbmodel.metadata = metadata - - def set_label(self, val): - self._dbmodel.label = val - - def set_hostname(self, val): - self._dbmodel.hostname = val - - def set_description(self, val): - self._dbmodel.description = val - - def get_scheduler_type(self): - return self._dbmodel.scheduler_type - - def set_scheduler_type(self, scheduler_type): - self._dbmodel.scheduler_type = scheduler_type - - def get_transport_type(self): - return self._dbmodel.transport_type - - def set_transport_type(self, transport_type): - self._dbmodel.transport_type = transport_type - - -class DjangoComputerCollection(BackendComputerCollection): - """Collection of `Computer` instances.""" - - ENTITY_CLASS = DjangoComputer - - @staticmethod - def list_names(): - return list(models.DbComputer.objects.filter().values_list('name', flat=True)) - - def delete(self, pk): - """Delete the computer with the given pk.""" - from django.db.models.deletion import ProtectedError - try: - models.DbComputer.objects.filter(pk=pk).delete() - except ProtectedError: - raise exceptions.InvalidOperation( - 'Unable to delete the requested computer: there' - 'is at least one node using this computer' - ) diff --git a/aiida/orm/implementation/django/convert.py b/aiida/orm/implementation/django/convert.py deleted file mode 100644 index 9e446b2532..0000000000 --- a/aiida/orm/implementation/django/convert.py +++ /dev/null @@ -1,234 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=cyclic-import,no-member -"""Module to get an ORM backend instance from a database model instance.""" - -try: # Python3 - from functools import singledispatch -except ImportError: # Python2 - from singledispatch import singledispatch - -import aiida.backends.djsite.db.models as djmodels - -__all__ = ('get_backend_entity',) - - -@singledispatch -def get_backend_entity(dbmodel, backend): # pylint: disable=unused-argument - """ - Default get_backend_entity from DbModel - - :param dbmodel: the db model instance - """ - raise TypeError( - f'No corresponding AiiDA backend class exists for the DbModel instance {dbmodel.__class__.__name__}' - ) - - -@get_backend_entity.register(djmodels.DbUser) -def _(dbmodel, backend): - """ - get_backend_entity for Django DbUser - """ - from . import users - return users.DjangoUser.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbGroup) -def _(dbmodel, backend): - """ - get_backend_entity for Django DbGroup - """ - from . import groups - return groups.DjangoGroup.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbComputer) -def _(dbmodel, backend): - """ - get_backend_entity for Django DbGroup - """ - from . import computers - return computers.DjangoComputer.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbNode) -def _(dbmodel, backend): - """ - get_backend_entity for Django DbNode. It will return an ORM instance since - there is not Node backend entity yet. - """ - from . import nodes - return nodes.DjangoNode.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbAuthInfo) -def _(dbmodel, backend): - """ - get_backend_entity for Django DbAuthInfo - """ - from . import authinfos - return authinfos.DjangoAuthInfo.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbComment) -def _(dbmodel, backend): - from . import comments - return comments.DjangoComment.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbLog) -def _(dbmodel, backend): - from . import logs - return logs.DjangoLog.from_dbmodel(dbmodel, backend) - - -@get_backend_entity.register(djmodels.DbUser.sa) -def _(dbmodel, backend): - """ - get_backend_entity for DummyModel DbUser. - DummyModel instances are created when QueryBuilder queries the Django backend. - """ - from . import users - djuser_instance = djmodels.DbUser( - id=dbmodel.id, - email=dbmodel.email, - first_name=dbmodel.first_name, - last_name=dbmodel.last_name, - institution=dbmodel.institution - ) - return users.DjangoUser.from_dbmodel(djuser_instance, backend) - - -@get_backend_entity.register(djmodels.DbGroup.sa) -def _(dbmodel, backend): - """ - get_backend_entity for DummyModel DbGroup. - DummyModel instances are created when QueryBuilder queries the Django backend. - """ - from . 
import groups - djgroup_instance = djmodels.DbGroup( - id=dbmodel.id, - type_string=dbmodel.type_string, - uuid=dbmodel.uuid, - label=dbmodel.label, - time=dbmodel.time, - description=dbmodel.description, - user_id=dbmodel.user_id, - ) - return groups.DjangoGroup.from_dbmodel(djgroup_instance, backend) - - -@get_backend_entity.register(djmodels.DbComputer.sa) -def _(dbmodel, backend): - """ - get_backend_entity for DummyModel DbComputer. - DummyModel instances are created when QueryBuilder queries the Django backend. - """ - from . import computers - djcomputer_instance = djmodels.DbComputer( - id=dbmodel.id, - uuid=dbmodel.uuid, - label=dbmodel.label, - hostname=dbmodel.hostname, - description=dbmodel.description, - transport_type=dbmodel.transport_type, - scheduler_type=dbmodel.scheduler_type, - metadata=dbmodel.metadata - ) - return computers.DjangoComputer.from_dbmodel(djcomputer_instance, backend) - - -@get_backend_entity.register(djmodels.DbNode.sa) -def _(dbmodel, backend): - """ - get_backend_entity for DummyModel DbNode. - DummyModel instances are created when QueryBuilder queries the Django backend. - """ - djnode_instance = djmodels.DbNode( - id=dbmodel.id, - node_type=dbmodel.node_type, - process_type=dbmodel.process_type, - uuid=dbmodel.uuid, - ctime=dbmodel.ctime, - mtime=dbmodel.mtime, - label=dbmodel.label, - description=dbmodel.description, - dbcomputer_id=dbmodel.dbcomputer_id, - user_id=dbmodel.user_id, - attributes=dbmodel.attributes, - extras=dbmodel.extras - ) - - from . import nodes - return nodes.DjangoNode.from_dbmodel(djnode_instance, backend) - - -@get_backend_entity.register(djmodels.DbAuthInfo.sa) -def _(dbmodel, backend): - """ - get_backend_entity for DummyModel DbAuthInfo. - DummyModel instances are created when QueryBuilder queries the Django backend. - """ - from . import authinfos - djauthinfo_instance = djmodels.DbAuthInfo( - id=dbmodel.id, - aiidauser_id=dbmodel.aiidauser_id, - dbcomputer_id=dbmodel.dbcomputer_id, - metadata=dbmodel.metadata, # pylint: disable=protected-access - auth_params=dbmodel.auth_params, - enabled=dbmodel.enabled, - ) - return authinfos.DjangoAuthInfo.from_dbmodel(djauthinfo_instance, backend) - - -@get_backend_entity.register(djmodels.DbComment.sa) -def _(dbmodel, backend): - """ - Convert a dbcomment to the backend entity - """ - from . import comments - djcomment = djmodels.DbComment( - id=dbmodel.id, - uuid=dbmodel.uuid, - dbnode_id=dbmodel.dbnode_id, - ctime=dbmodel.ctime, - mtime=dbmodel.mtime, - user_id=dbmodel.user_id, - content=dbmodel.content - ) - return comments.DjangoComment.from_dbmodel(djcomment, backend) - - -@get_backend_entity.register(djmodels.DbLog.sa) -def _(dbmodel, backend): - """ - Convert a dbcomment to the backend entity - """ - from . 
import logs - djlog = djmodels.DbLog( - id=dbmodel.id, - time=dbmodel.time, - loggername=dbmodel.loggername, - levelname=dbmodel.levelname, - dbnode_id=dbmodel.dbnode_id, - message=dbmodel.message, - metadata=dbmodel.metadata # pylint: disable=protected-access - ) - return logs.DjangoLog.from_dbmodel(djlog, backend) - - -@get_backend_entity.register(djmodels.DbLink.sa) -def _(dbmodel, backend): - """ - Convert a dblink to the backend entity - """ - from aiida.orm.utils.links import LinkQuadruple - return LinkQuadruple(dbmodel.input_id, dbmodel.output_id, dbmodel.type, dbmodel.label) diff --git a/aiida/orm/implementation/django/entities.py b/aiida/orm/implementation/django/entities.py deleted file mode 100644 index 5a31ab1f3a..0000000000 --- a/aiida/orm/implementation/django/entities.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Classes and methods for Django specific backend entities""" -from typing import Generic, Set, TypeVar - -from django.db.models import Model # pylint: disable=import-error, no-name-in-module - -from aiida.common.lang import type_check - -from . import utils - -ModelType = TypeVar('ModelType') # pylint: disable=invalid-name - - -class DjangoModelEntity(Generic[ModelType]): - """A mixin that adds some common Django backend entity methods""" - - MODEL_CLASS = None - _dbmodel = None - _auto_flush = () - - @classmethod - def _class_check(cls): - """Assert that the class is correctly configured""" - assert issubclass(cls.MODEL_CLASS, Model), 'Must set the MODEL_CLASS in the derived class to a SQLA model' - - @classmethod - def from_dbmodel(cls, dbmodel, backend): - """ - Create a DjangoEntity from the corresponding db model class - - :param dbmodel: the model to create the entity from - :param backend: the corresponding backend - :return: the Django entity - """ - from .backend import DjangoBackend # pylint: disable=cyclic-import - cls._class_check() - type_check(dbmodel, cls.MODEL_CLASS) - type_check(backend, DjangoBackend) - entity = cls.__new__(cls) - super(DjangoModelEntity, entity).__init__(backend) - entity._dbmodel = utils.ModelWrapper(dbmodel, auto_flush=cls._auto_flush) # pylint: disable=protected-access - return entity - - @classmethod - def get_dbmodel_attribute_name(cls, attr_name): - """ - Given the name of an attribute of the entity class give the corresponding name of the attribute - in the db model. It if doesn't exit this raises a ValueError - - :param attr_name: - :return: the dbmodel attribute name - :rtype: str - """ - if hasattr(cls.MODEL_CLASS, attr_name): - return attr_name - - raise ValueError(f"Unknown attribute '{attr_name}'") - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._class_check() - - @property - def dbmodel(self): - return self._dbmodel._model # pylint: disable=protected-access - - @property - def id(self): # pylint: disable=invalid-name - return self._dbmodel.pk - - @property - def is_stored(self): - """ - Is this entity stored? 
- - :return: True if stored, False otherwise - """ - return self._dbmodel.id is not None - - def store(self): - """ - Store the entity - - :return: the entity itself - """ - self._dbmodel.save() - return self - - def _flush_if_stored(self, fields: Set[str]) -> None: - if self._dbmodel.is_saved(): - self._dbmodel._flush(fields) # pylint: disable=protected-access diff --git a/aiida/orm/implementation/django/groups.py b/aiida/orm/implementation/django/groups.py deleted file mode 100644 index 4aefdbfeeb..0000000000 --- a/aiida/orm/implementation/django/groups.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=no-member -"""Django Group entity""" -from collections.abc import Iterator, Sized - -from django.db import transaction - -from aiida.backends.djsite.db import models -from aiida.common.lang import type_check -from aiida.orm.implementation.groups import BackendGroup, BackendGroupCollection -from aiida.orm.implementation.sql.extras import SqlExtrasMixin - -from . import entities, users, utils - -__all__ = ('DjangoGroup', 'DjangoGroupCollection') - - -class DjangoGroup(entities.DjangoModelEntity[models.DbGroup], SqlExtrasMixin, BackendGroup): # pylint: disable=abstract-method - """The Django group object""" - MODEL_CLASS = models.DbGroup - - def __init__(self, backend, label, user, description='', type_string=''): - """Construct a new Django group""" - type_check(user, users.DjangoUser) - super().__init__(backend) - - self._dbmodel = utils.ModelWrapper( - models.DbGroup(label=label, description=description, user=user.dbmodel, type_string=type_string) - ) - - @property - def label(self): - return self._dbmodel.label - - @label.setter - def label(self, label): - """ - Attempt to change the label of the group instance. 
If the group is already stored - and the another group of the same type already exists with the desired label, a - UniquenessError will be raised - - :param label : the new group label - :raises aiida.common.UniquenessError: if another group of same type and label already exists - """ - self._dbmodel.label = label - - @property - def description(self): - return self._dbmodel.description - - @description.setter - def description(self, value): - self._dbmodel.description = value - - @property - def type_string(self): - return self._dbmodel.type_string - - @property - def user(self): - return self._backend.users.from_dbmodel(self._dbmodel.user) - - @user.setter - def user(self, new_user): - type_check(new_user, users.DjangoUser) - assert new_user.backend == self.backend, 'User from a different backend' - self._dbmodel.user = new_user.dbmodel - - @property - def uuid(self): - return str(self._dbmodel.uuid) - - def __int__(self): - if not self.is_stored: - return None - - return self._dbnode.pk - - def store(self): - if not self.is_stored: - with transaction.atomic(): - if self.user is not None and not self.user.is_stored: - self.user.store() - # We now have to reset the model's user entry because - # django will have assigned the user an ID but this - # is not automatically propagated to us - self._dbmodel.user = self.user.dbmodel - self._dbmodel.save() - - # To allow to do directly g = Group(...).store() - return self - - def count(self): - """Return the number of entities in this group. - - :return: integer number of entities contained within the group - """ - return self._dbmodel.dbnodes.count() - - def clear(self): - """Remove all the nodes from this group.""" - self._dbmodel.dbnodes.clear() - - @property - def nodes(self): - """Get an iterator to the nodes in the group""" - - class NodesIterator(Iterator, Sized): - """The nodes iterator""" - - def __init__(self, dbnodes, backend): - super().__init__() - self._backend = backend - self._dbnodes = dbnodes - self.generator = self._genfunction() - - def _genfunction(self): - # Best to use dbnodes.iterator() so we load entities from the database as we need them - # see: http://blog.etianen.com/blog/2013/06/08/django-querysets/ - for node in self._dbnodes.iterator(): - yield self._backend.get_backend_entity(node) - - def __iter__(self): - return self - - def __len__(self): - return len(self._dbnodes) - - def __getitem__(self, value): - if isinstance(value, slice): - return [self._backend.get_backend_entity(n) for n in self._dbnodes[value]] - - return self._backend.get_backend_entity(self._dbnodes[value]) - - def __next__(self): - return next(self.generator) - - return NodesIterator(self._dbmodel.dbnodes.all(), self._backend) - - def add_nodes(self, nodes, **kwargs): - from .nodes import DjangoNode - - super().add_nodes(nodes) - - node_pks = [] - - for node in nodes: - - if not isinstance(node, DjangoNode): - raise TypeError(f'invalid type {type(node)}, has to be {DjangoNode}') - - if not node.is_stored: - raise ValueError('At least one of the provided nodes is unstored, stopping...') - - node_pks.append(node.pk) - - self._dbmodel.dbnodes.add(*node_pks) - - def remove_nodes(self, nodes): - from .nodes import DjangoNode - - super().remove_nodes(nodes) - - node_pks = [] - - for node in nodes: - - if not isinstance(node, DjangoNode): - raise TypeError(f'invalid type {type(node)}, has to be {DjangoNode}') - - if not node.is_stored: - raise ValueError('At least one of the provided nodes is unstored, stopping...') - - node_pks.append(node.pk) - - 
self._dbmodel.dbnodes.remove(*node_pks) - - -class DjangoGroupCollection(BackendGroupCollection): - """The Django Group collection""" - - ENTITY_CLASS = DjangoGroup - - def delete(self, id): # pylint: disable=redefined-builtin - models.DbGroup.objects.filter(id=id).delete() diff --git a/aiida/orm/implementation/django/logs.py b/aiida/orm/implementation/django/logs.py deleted file mode 100644 index 4ddd8fe10f..0000000000 --- a/aiida/orm/implementation/django/logs.py +++ /dev/null @@ -1,153 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""The Django log and log collection module""" -# pylint: disable=import-error,no-name-in-module - -from django.core.exceptions import ObjectDoesNotExist - -from aiida.backends.djsite.db import models -from aiida.common import exceptions - -from . import entities -from .. import BackendLog, BackendLogCollection - - -class DjangoLog(entities.DjangoModelEntity[models.DbLog], BackendLog): - """Django Log backend class""" - - MODEL_CLASS = models.DbLog - - def __init__(self, backend, time, loggername, levelname, dbnode_id, message='', metadata=None): - # pylint: disable=too-many-arguments - super().__init__(backend) - self._dbmodel = models.DbLog( - time=time, - loggername=loggername, - levelname=levelname, - dbnode_id=dbnode_id, - message=message, - metadata=metadata or {} - ) - - @property - def uuid(self): - """ - Get the string representation of the uuid of the object that created the log entry - """ - return str(self._dbmodel.uuid) - - @property - def time(self): - """ - Get the time corresponding to the entry - """ - return self._dbmodel.time - - @property - def loggername(self): - """ - The name of the logger that created this entry - """ - return self._dbmodel.loggername - - @property - def levelname(self): - """ - The name of the log level - """ - return self._dbmodel.levelname - - @property - def dbnode_id(self): - """ - Get the id of the object that created the log entry - """ - return self._dbmodel.dbnode_id - - @property - def message(self): - """ - Get the message corresponding to the entry - """ - return self._dbmodel.message - - @property - def metadata(self): - """ - Get the metadata corresponding to the entry - """ - return self._dbmodel.metadata - - -class DjangoLogCollection(BackendLogCollection): - """Django log collection""" - - ENTITY_CLASS = DjangoLog - - def delete(self, log_id): - """ - Remove a Log entry from the collection with the given id - - :param log_id: id of the Log to delete - :type log_id: int - - :raises TypeError: if ``log_id`` is not an `int` - :raises `~aiida.common.exceptions.NotExistent`: if Log with ID ``log_id`` is not found - """ - if not isinstance(log_id, int): - raise TypeError('log_id must be an int') - - try: - models.DbLog.objects.get(id=log_id).delete() - except ObjectDoesNotExist: - raise exceptions.NotExistent(f"Log with id '{log_id}' not found") - - def delete_all(self): - """ - Delete all Log entries. 
- - :raises `~aiida.common.exceptions.IntegrityError`: if all Logs could not be deleted - """ - from django.db import transaction - try: - with transaction.atomic(): - models.DbLog.objects.all().delete() - except Exception as exc: - raise exceptions.IntegrityError(f'Could not delete all Logs. Full exception: {exc}') - - def delete_many(self, filters): - """ - Delete Logs based on ``filters`` - - :param filters: similar to QueryBuilder filter - :type filters: dict - - :return: (former) ``PK`` s of deleted Logs - :rtype: list - - :raises TypeError: if ``filters`` is not a `dict` - :raises `~aiida.common.exceptions.ValidationError`: if ``filters`` is empty - """ - from aiida.orm import Log, QueryBuilder - - # Checks - if not isinstance(filters, dict): - raise TypeError('filters must be a dictionary') - if not filters: - raise exceptions.ValidationError('filters must not be empty') - - # Apply filter and delete found entities - builder = QueryBuilder(backend=self.backend).append(Log, filters=filters, project='id') - entities_to_delete = builder.all(flat=True) - for entity in entities_to_delete: - self.delete(entity) - - # Return list of deleted entities' (former) PKs for checking - return entities_to_delete diff --git a/aiida/orm/implementation/django/nodes.py b/aiida/orm/implementation/django/nodes.py deleted file mode 100644 index 44c1adc66e..0000000000 --- a/aiida/orm/implementation/django/nodes.py +++ /dev/null @@ -1,325 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django implementation of the `BackendNode` and `BackendNodeCollection` classes.""" -# pylint: disable=import-error,no-name-in-module -from datetime import datetime -from typing import Any, Dict, Iterable, Tuple - -from django.core.exceptions import ObjectDoesNotExist -from django.db import IntegrityError, transaction - -from aiida.backends.djsite.db import models -from aiida.common import exceptions -from aiida.common.lang import type_check -from aiida.orm.implementation.sql.extras import SqlExtrasMixin -from aiida.orm.implementation.utils import clean_value, validate_attribute_extra_key - -from . import entities -from . import utils as dj_utils -from .. import BackendNode, BackendNodeCollection -from .computers import DjangoComputer -from .users import DjangoUser - - -class DjangoNode(entities.DjangoModelEntity[models.DbNode], SqlExtrasMixin, BackendNode): - """Django Node backend entity""" - - # pylint: disable=too-many-public-methods - - MODEL_CLASS = models.DbNode - LINK_CLASS = models.DbLink - - def __init__( - self, - backend, - node_type, - user, - computer=None, - process_type=None, - label='', - description='', - ctime=None, - mtime=None - ): - """Construct a new `BackendNode` instance wrapping a new `DbNode` instance. 
- - :param backend: the backend - :param node_type: the node type string - :param user: associated `BackendUser` - :param computer: associated `BackendComputer` - :param label: string label - :param description: string description - :param ctime: The creation time as datetime object - :param mtime: The modification time as datetime object - """ - # pylint: disable=too-many-arguments - super().__init__(backend) - - arguments = { - 'user': user.dbmodel, - 'node_type': node_type, - 'process_type': process_type, - 'label': label, - 'description': description, - } - - type_check(user, DjangoUser) - - if computer: - type_check(computer, DjangoComputer, f'computer is of type {type(computer)}') - arguments['dbcomputer'] = computer.dbmodel - - if ctime: - type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') - arguments['ctime'] = ctime - - if mtime: - type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') - arguments['mtime'] = mtime - - self._dbmodel = dj_utils.ModelWrapper(models.DbNode(**arguments)) - - def clone(self): - """Return an unstored clone of ourselves. - - :return: an unstored `BackendNode` with the exact same attributes and extras as self - """ - arguments = { - 'node_type': self._dbmodel.node_type, - 'process_type': self._dbmodel.process_type, - 'user': self._dbmodel.user, - 'dbcomputer': self._dbmodel.dbcomputer, - 'label': self._dbmodel.label, - 'description': self._dbmodel.description, - } - - clone = self.__class__.__new__(self.__class__) # pylint: disable=no-value-for-parameter - clone.__init__(self.backend, self.node_type, self.user) - clone._dbmodel = dj_utils.ModelWrapper(models.DbNode(**arguments)) # pylint: disable=protected-access - return clone - - @property - def ctime(self): - return self._dbmodel.ctime - - @property - def mtime(self): - return self._dbmodel.mtime - - @property - def uuid(self): - return str(self._dbmodel.uuid) - - @property - def node_type(self): - return self._dbmodel.node_type - - @property - def process_type(self): - return self._dbmodel.process_type - - @process_type.setter - def process_type(self, value): - self._dbmodel.process_type = value - - @property - def label(self): - return self._dbmodel.label - - @label.setter - def label(self, value): - self._dbmodel.label = value - - @property - def description(self): - return self._dbmodel.description - - @description.setter - def description(self, value): - self._dbmodel.description = value - - @property - def repository_metadata(self): - return self._dbmodel.repository_metadata or {} - - @repository_metadata.setter - def repository_metadata(self, value): - self._dbmodel.repository_metadata = value - - @property - def computer(self): - try: - return self.backend.computers.from_dbmodel(self._dbmodel.dbcomputer) - except TypeError: - return None - - @computer.setter - def computer(self, computer): - type_check(computer, DjangoComputer, allow_none=True) - - if computer is not None: - computer = computer.dbmodel - - self._dbmodel.dbcomputer = computer - - @property - def user(self): - return self.backend.users.from_dbmodel(self._dbmodel.user) - - @user.setter - def user(self, user): - type_check(user, DjangoUser) - self._dbmodel.user = user.dbmodel - - def add_incoming(self, source, link_type, link_label): - type_check(source, DjangoNode) - - if not self.is_stored: - raise exceptions.ModificationNotAllowed('node has to be stored when adding an incoming link') - - if not source.is_stored: - raise exceptions.ModificationNotAllowed('source node has to be stored when 
adding a link from it') - - self._add_link(source, link_type, link_label) - - def _add_link(self, source, link_type, link_label): - """Add a link of the given type from a given node to ourself. - - :param source: the node from which the link is coming - :param link_type: the link type - :param link_label: the link label - """ - savepoint_id = None - - try: - # Transactions are needed here for Postgresql: - # https://docs.djangoproject.com/en/1.5/topics/db/transactions/#handling-exceptions-within-postgresql-transactions - savepoint_id = transaction.savepoint() - self.LINK_CLASS(input_id=source.id, output_id=self.id, label=link_label, type=link_type.value).save() - transaction.savepoint_commit(savepoint_id) - except IntegrityError as exception: - transaction.savepoint_rollback(savepoint_id) - raise exceptions.UniquenessError(f'failed to create the link: {exception}') from exception - - def clean_values(self): - self._dbmodel.attributes = clean_value(self._dbmodel.attributes) - self._dbmodel.extras = clean_value(self._dbmodel.extras) - - def store(self, links=None, with_transaction=True, clean=True): # pylint: disable=arguments-differ - import contextlib - - from aiida.backends.djsite.db.models import suppress_auto_now - - if clean: - self.clean_values() - - with transaction.atomic() if with_transaction else contextlib.nullcontext(): - with suppress_auto_now([(models.DbNode, ['mtime'])]) if self.mtime else contextlib.nullcontext(): - # We need to save the node model instance itself first such that it has a pk - # that can be used in the foreign keys that will be needed for setting the - # attributes and links - self.dbmodel.save() - - if links: - for link_triple in links: - self._add_link(*link_triple) - - return self - - @property - def attributes(self): - return self._dbmodel.attributes - - def get_attribute(self, key: str) -> Any: - try: - return self._dbmodel.attributes[key] - except KeyError as exception: - raise AttributeError(f'attribute `{exception}` does not exist') from exception - - def set_attribute(self, key: str, value: Any) -> None: - validate_attribute_extra_key(key) - - if self.is_stored: - value = clean_value(value) - - self._dbmodel.attributes[key] = value - self._flush_if_stored({'attributes'}) - - def set_attribute_many(self, attributes: Dict[str, Any]) -> None: - for key in attributes: - validate_attribute_extra_key(key) - - if self.is_stored: - attributes = {key: clean_value(value) for key, value in attributes.items()} - - for key, value in attributes.items(): - # We need to use `self.dbmodel` without the underscore, because otherwise the second iteration will refetch - # what is in the database and we lose the initial changes. 
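# ---------------------------------------------------------------------------
# Editor's aside (not part of the patch): the `store` method above toggles
# transactional behaviour by choosing the context manager up front, i.e.
# `transaction.atomic() if with_transaction else contextlib.nullcontext()`,
# so the body is written only once. A minimal self-contained sketch of the
# same idiom, with a hypothetical function and lock standing in for the
# Django transaction machinery:
import contextlib
import threading

_LOCK = threading.Lock()

def append_record(records: list, record: object, synchronised: bool = True) -> None:
    # nullcontext() enters and exits without side effects, so a single code
    # path serves both the locked and unlocked cases.
    with _LOCK if synchronised else contextlib.nullcontext():
        records.append(record)
# ---------------------------------------------------------------------------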
- self.dbmodel.attributes[key] = value - self._flush_if_stored({'attributes'}) - - def reset_attributes(self, attributes: Dict[str, Any]) -> None: - for key in attributes: - validate_attribute_extra_key(key) - - if self.is_stored: - attributes = clean_value(attributes) - - self.dbmodel.attributes = attributes - self._flush_if_stored({'attributes'}) - - def delete_attribute(self, key: str) -> None: - try: - self._dbmodel.attributes.pop(key) - except KeyError as exception: - raise AttributeError(f'attribute `{exception}` does not exist') from exception - else: - self._flush_if_stored({'attributes'}) - - def delete_attribute_many(self, keys: Iterable[str]) -> None: - non_existing_keys = [key for key in keys if key not in self._dbmodel.attributes] - - if non_existing_keys: - raise AttributeError(f"attributes `{', '.join(non_existing_keys)}` do not exist") - - for key in keys: - self.dbmodel.attributes.pop(key) - - self._flush_if_stored({'attributes'}) - - def clear_attributes(self): - self._dbmodel.attributes = {} - self._flush_if_stored({'attributes'}) - - def attributes_items(self) -> Iterable[Tuple[str, Any]]: - for key, value in self._dbmodel.attributes.items(): - yield key, value - - def attributes_keys(self) -> Iterable[str]: - for key in self._dbmodel.attributes.keys(): - yield key - - -class DjangoNodeCollection(BackendNodeCollection): - """The collection of Node entries.""" - - ENTITY_CLASS = DjangoNode - - def get(self, pk): - try: - return self.ENTITY_CLASS.from_dbmodel(models.DbNode.objects.get(pk=pk), self.backend) - except ObjectDoesNotExist: - raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from ObjectDoesNotExist - - def delete(self, pk): - try: - models.DbNode.objects.filter(pk=pk).delete() # pylint: disable=no-member - except ObjectDoesNotExist: - raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from ObjectDoesNotExist diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py deleted file mode 100644 index 4c973dda81..0000000000 --- a/aiida/orm/implementation/django/querybuilder.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django query builder implementation""" -from aiida.backends.djsite.db import models -from aiida.orm.implementation.sqlalchemy.querybuilder import SqlaQueryBuilder - - -class DjangoQueryBuilder(SqlaQueryBuilder): - """Django query builder - - With the Django backend, we actually still use SQLAlchemy, since Django does not support complex queries. - We use aldjemy to generate SQLAlchemy models by introspecting the Django models. 
- """ - - def set_field_mappings(self): - pass - - @property - def Node(self): - return models.DbNode.sa # pylint: disable=no-member - - @property - def Link(self): - return models.DbLink.sa # pylint: disable=no-member - - @property - def Computer(self): - return models.DbComputer.sa # pylint: disable=no-member - - @property - def User(self): - return models.DbUser.sa # pylint: disable=no-member - - @property - def Group(self): - return models.DbGroup.sa # pylint: disable=no-member - - @property - def AuthInfo(self): - return models.DbAuthInfo.sa # pylint: disable=no-member - - @property - def Comment(self): - return models.DbComment.sa # pylint: disable=no-member - - @property - def Log(self): - return models.DbLog.sa # pylint: disable=no-member - - @property - def table_groups_nodes(self): - return models.DbGroup.sa.table.metadata.tables['db_dbgroup_dbnodes'] # pylint: disable=no-member - - def modify_expansions(self, alias, expansions): - """ - For django, there are no additional expansions for now, so - I am returning an empty list - """ - return expansions - - @staticmethod - def get_table_name(aliased_class): - """Returns the table name given an Aliased class based on Aldjemy""" - return aliased_class._aliased_insp._target.table.name # pylint: disable=protected-access - - def get_column_names(self, alias): - """ - Given the backend specific alias, return the column names that correspond to the aliased table. - """ - # pylint: disable=protected-access - return [ - str(c).replace(f'{alias._aliased_insp.class_.table.name}.', '') - for c in alias._aliased_insp.class_.table._columns._all_columns - ] diff --git a/aiida/orm/implementation/django/users.py b/aiida/orm/implementation/django/users.py deleted file mode 100644 index 9eaa55ff00..0000000000 --- a/aiida/orm/implementation/django/users.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Django user module""" -from aiida.backends.djsite.db import models -from aiida.backends.djsite.db.models import DbUser -from aiida.orm.implementation.users import BackendUser, BackendUserCollection - -from . 
import entities, utils - -__all__ = ('DjangoUser', 'DjangoUserCollection') - - -class DjangoUser(entities.DjangoModelEntity[models.DbUser], BackendUser): - """The Django user class""" - - MODEL_CLASS = models.DbUser - - def __init__(self, backend, email, first_name, last_name, institution): - # pylint: disable=too-many-arguments - super().__init__(backend) - self._dbmodel = utils.ModelWrapper( - DbUser(email=email, first_name=first_name, last_name=last_name, institution=institution) - ) - - @property - def email(self): - return self._dbmodel.email - - @email.setter - def email(self, email): - self._dbmodel.email = email - - @property - def first_name(self): - return self._dbmodel.first_name - - @first_name.setter - def first_name(self, first_name): - self._dbmodel.first_name = first_name - - @property - def last_name(self): - return self._dbmodel.last_name - - @last_name.setter - def last_name(self, last_name): - self._dbmodel.last_name = last_name - - @property - def institution(self): - return self._dbmodel.institution - - @institution.setter - def institution(self, institution): - self._dbmodel.institution = institution - - -class DjangoUserCollection(BackendUserCollection): - """The Django collection of users""" - - ENTITY_CLASS = DjangoUser - - def create(self, email, first_name='', last_name='', institution=''): # pylint: disable=arguments-differ - """ - Create a user with the provided email address - - :return: A new user object - :rtype: :class:`aiida.orm.implementation.django.users.DjangoUser` - """ - # pylint: disable=abstract-class-instantiated - return DjangoUser(self.backend, email, first_name, last_name, institution) diff --git a/aiida/orm/implementation/django/utils.py b/aiida/orm/implementation/django/utils.py deleted file mode 100644 index ec984df6e8..0000000000 --- a/aiida/orm/implementation/django/utils.py +++ /dev/null @@ -1,146 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Utilities for the implementation of the Django backend.""" - -# pylint: disable=import-error,no-name-in-module -from django.db import IntegrityError, transaction -from django.db.models.fields import FieldDoesNotExist - -from aiida.common import exceptions - -IMMUTABLE_MODEL_FIELDS = {'id', 'pk', 'uuid', 'node_type'} - - -class ModelWrapper: - """Wrap a database model instance to correctly update and flush the data model when getting or setting a field. - - If the model is not stored, the behavior of the get and set attributes is unaltered. However, if the model is - stored, which is to say, it has a primary key, the `getattr` and `setattr` are modified as follows: - - * `getattr`: if the item corresponds to a mutable model field, the model instance is refreshed first - * `setattr`: if the item corresponds to a mutable model field, changes are flushed after performing the change - """ - - # pylint: disable=too-many-instance-attributes - - def __init__(self, model, auto_flush=()): - """Construct the ModelWrapper. 
- - :param model: the database model instance to wrap - :param auto_flush: an optional tuple of database model fields that are always to be flushed, in addition to - the field that corresponds to the attribute being set through `__setattr__`. - """ - super().__init__() - # Have to do it this way because we overwrite __setattr__ - object.__setattr__(self, '_model', model) - object.__setattr__(self, '_auto_flush', auto_flush) - - def __getattr__(self, item): - """Get an attribute of the model instance. - - If the model is saved in the database, the item corresponds to a mutable model field and the current scope is - not in an open database connection, then the field's value is first refreshed from the database. - - :param item: the name of the model field - :return: the value of the model's attribute - """ - if self.is_saved() and self._is_mutable_model_field(item): - self._ensure_model_uptodate(fields=(item,)) - - return getattr(self._model, item) - - def __setattr__(self, key, value): - """Set the attribute on the model instance. - - If the field being set is a mutable model field and the model is saved, the changes are flushed. - - :param key: the name of the model field - :param value: the value to set - """ - setattr(self._model, key, value) - if self.is_saved() and self._is_mutable_model_field(key): - fields = set((key,) + self._auto_flush) - self._flush(fields=fields) - - def is_saved(self): - """Retun whether the wrapped model instance is saved in the database. - - :return: boolean, True if the model is saved in the database, False otherwise - """ - return self._model.pk is not None - - def save(self): - """Store the model instance. - - :raises `aiida.common.IntegrityError`: if a database integrity error is raised during the save. - """ - # transactions are needed here for Postgresql: - # https://docs.djangoproject.com/en/1.7/topics/db/transactions/#handling-exceptions-within-postgresql-transactions - with transaction.atomic(): - try: - self._model.save() - except IntegrityError as exception: - raise exceptions.IntegrityError(str(exception)) - - def _is_mutable_model_field(self, field): - """Return whether the field is a mutable field of the model. - - :return: boolean, True if the field is a model field and is not in the `IMMUTABLE_MODEL_FIELDS` set. - """ - if field in IMMUTABLE_MODEL_FIELDS: - return False - - return self._is_model_field(field) - - def _is_model_field(self, name): - """Return whether the field is a field of the model. - - :return: boolean, True if the field is a model field, False otherwise. - """ - try: - self._model.__class__._meta.get_field(name) # pylint: disable=protected-access - except FieldDoesNotExist: - return False - else: - return True - - def _flush(self, fields=None): - """Flush the fields of the model to the database. - - .. note:: If the wrapped model is not actually saved in the database yet, this method is a no-op. - - :param fields: the model fields whose current value to flush to the database - """ - if self.is_saved(): - try: - # Manually append the `mtime` to fields to update, because when using the `update_fields` keyword of the - # `save` method, the `auto_now` property of `mtime` column is not triggered. 
If `update_fields` is None - # everything is updated, so we do not have to add anything - if fields is not None and self._is_model_field('mtime'): - fields.add('mtime') - self._model.save(update_fields=fields) - except IntegrityError as exception: - raise exceptions.IntegrityError(str(exception)) - - def _ensure_model_uptodate(self, fields=None): - """Refresh all fields of the wrapped model instance by fetching the current state of the database instance. - - :param fields: optionally refresh only these fields, if `None` all fields are refreshed. - """ - if self.is_saved(): - self._model.refresh_from_db(fields=fields) - - @staticmethod - def _in_transaction(): - """Return whether the current scope is within an open database transaction. - - :return: boolean, True if currently in open transaction, False otherwise. - """ - return not transaction.get_autocommit() diff --git a/aiida/orm/implementation/entities.py b/aiida/orm/implementation/entities.py index 6b4eef700b..84d2531c9d 100644 --- a/aiida/orm/implementation/entities.py +++ b/aiida/orm/implementation/entities.py @@ -22,7 +22,7 @@ class BackendEntity(abc.ABC): """An first-class entity in the backend""" - def __init__(self, backend: 'Backend', **kwargs: Any): + def __init__(self, backend: 'Backend', **kwargs: Any): # pylint: disable=unused-argument self._backend = backend @property diff --git a/aiida/orm/implementation/sql/backends.py b/aiida/orm/implementation/sql/backends.py deleted file mode 100644 index fb8b9321e7..0000000000 --- a/aiida/orm/implementation/sql/backends.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Generic backend related objects""" -import abc -from typing import TYPE_CHECKING, Generic, TypeVar - -from .. import backends, entities - -if TYPE_CHECKING: - from aiida.repository.backend import DiskObjectStoreRepositoryBackend - -__all__ = ('SqlBackend',) - -# The template type for the base sqlalchemy/django ORM model type -ModelType = TypeVar('ModelType') # pylint: disable=invalid-name - - -class SqlBackend(Generic[ModelType], backends.Backend): - """ - A class for SQL based backends. 
Assumptions are that: - * there is an ORM - * that it is possible to convert from ORM model instances to backend instances - * that psycopg2 is used as the engine - - if any of these assumptions do not fit then just implement a backend from :class:`aiida.orm.implementation.Backend` - """ - - def get_repository(self) -> 'DiskObjectStoreRepositoryBackend': - from disk_objectstore import Container - - from aiida.manage.manager import get_manager - from aiida.repository.backend import DiskObjectStoreRepositoryBackend - - profile = get_manager().get_profile() - assert profile is not None, 'profile not loaded' - container = Container(profile.repository_path / 'container') - return DiskObjectStoreRepositoryBackend(container=container) - - @abc.abstractmethod - def get_backend_entity(self, model: ModelType) -> entities.BackendEntity: - """ - Return the backend entity that corresponds to the given Model instance - - :param model: the ORM model instance to promote to a backend instance - :return: the backend entity corresponding to the given model - """ - - @abc.abstractmethod - def cursor(self): - """ - Return a psycopg cursor. This method should be used as a context manager i.e.:: - - with backend.cursor(): - # Do stuff - - :return: a psycopg cursor - :rtype: :class:`psycopg2.extensions.cursor` - """ - - @abc.abstractmethod - def execute_raw(self, query): - """Execute a raw SQL statement and return the result. - - :param query: a string containing a raw SQL statement - :return: the result of the query - """ - - def execute_prepared_statement(self, sql, parameters): - """Execute an SQL statement with optional prepared statements. - - :param sql: the SQL statement string - :param parameters: dictionary to use to populate the prepared statement - """ - results = [] - - with self.cursor() as cursor: - cursor.execute(sql, parameters) - - for row in cursor: - results.append(row) - - return results diff --git a/aiida/orm/implementation/sqlalchemy/__init__.py b/aiida/orm/implementation/sqlalchemy/__init__.py index 82a9691ef1..1606ed905e 100644 --- a/aiida/orm/implementation/sqlalchemy/__init__.py +++ b/aiida/orm/implementation/sqlalchemy/__init__.py @@ -20,7 +20,7 @@ from .users import * __all__ = ( - 'SqlaBackend', + 'PsqlDosBackend', 'SqlaGroup', 'SqlaGroupCollection', 'SqlaUser', diff --git a/aiida/orm/implementation/sqlalchemy/authinfos.py b/aiida/orm/implementation/sqlalchemy/authinfos.py index e849e2a08d..daec707b7d 100644 --- a/aiida/orm/implementation/sqlalchemy/authinfos.py +++ b/aiida/orm/implementation/sqlalchemy/authinfos.py @@ -32,7 +32,7 @@ def __init__(self, backend, computer, user): super().__init__(backend) type_check(user, users.SqlaUser) type_check(computer, computers.SqlaComputer) - self._dbmodel = utils.ModelWrapper(DbAuthInfo(dbcomputer=computer.dbmodel, aiidauser=user.dbmodel)) + self._dbmodel = utils.ModelWrapper(DbAuthInfo(dbcomputer=computer.dbmodel, aiidauser=user.dbmodel), backend) @property def id(self): # pylint: disable=invalid-name @@ -124,7 +124,8 @@ def delete(self, pk): session = self.backend.get_session() try: - session.query(DbAuthInfo).filter_by(id=pk).one().delete() + row = session.query(DbAuthInfo).filter_by(id=pk).one() + session.delete(row) session.commit() except NoResultFound: raise exceptions.NotExistent(f'AuthInfo<{pk}> does not exist') diff --git a/aiida/orm/implementation/sqlalchemy/backend.py b/aiida/orm/implementation/sqlalchemy/backend.py index 8cf67de0ae..d06df46cd4 100644 --- a/aiida/orm/implementation/sqlalchemy/backend.py +++ 
b/aiida/orm/implementation/sqlalchemy/backend.py @@ -11,37 +11,170 @@ # pylint: disable=missing-function-docstring from contextlib import contextmanager, nullcontext import functools -from typing import Iterator, List, Sequence +from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Union -from sqlalchemy.orm import Session +from disk_objectstore import Container +from sqlalchemy import table +from sqlalchemy.orm import Session, scoped_session, sessionmaker -from aiida.backends.sqlalchemy.manager import SqlaBackendManager +from aiida.backends.sqlalchemy.migrator import REPOSITORY_UUID_KEY, PsqlDostoreMigrator from aiida.backends.sqlalchemy.models import base -from aiida.common.exceptions import IntegrityError +from aiida.common.exceptions import ClosedStorage, IntegrityError +from aiida.manage.configuration.profile import Profile +from aiida.orm import User from aiida.orm.entities import EntityTypes from . import authinfos, comments, computers, convert, groups, logs, nodes, querybuilder, users -from ..sql.backends import SqlBackend +from ..backends import Backend +from ..entities import BackendEntity -__all__ = ('SqlaBackend',) +if TYPE_CHECKING: + from aiida.repository.backend import DiskObjectStoreRepositoryBackend +__all__ = ('PsqlDosBackend',) -class SqlaBackend(SqlBackend[base.Base]): - """SqlAlchemy implementation of `aiida.orm.implementation.backends.Backend`.""" +CONTAINER_DEFAULTS: dict = { + 'pack_size_target': 4 * 1024 * 1024 * 1024, + 'loose_prefix_len': 2, + 'hash_type': 'sha256', + 'compression_algorithm': 'zlib+1' +} + + +class PsqlDosBackend(Backend): # pylint: disable=too-many-public-methods + """An AiiDA storage backend that stores data in a PostgreSQL database and disk-objectstore repository. + + Note, there were originally two such backends, `sqlalchemy` and `django`. + The `django` backend was removed, to consolidate access to this storage. 
+ """ + + migrator = PsqlDostoreMigrator + + @classmethod + def version_head(cls) -> str: + return cls.migrator.get_schema_version_head() + + @classmethod + def version_profile(cls, profile: Profile) -> None: + return cls.migrator(profile).get_schema_version_profile(check_legacy=True) + + @classmethod + def migrate(cls, profile: Profile) -> None: + cls.migrator(profile).migrate() + + def __init__(self, profile: Profile) -> None: + super().__init__(profile) + + # check that the storage is reachable and at the correct version + self.migrator(profile).validate_storage() + + self._session_factory: Optional[scoped_session] = None + self._initialise_session() + # save the URL of the database, for use in the __str__ method + self._db_url = self.get_session().get_bind().url # type: ignore - def __init__(self): - """Construct the backend instance by initializing all the collections.""" self._authinfos = authinfos.SqlaAuthInfoCollection(self) self._comments = comments.SqlaCommentCollection(self) self._computers = computers.SqlaComputerCollection(self) self._groups = groups.SqlaGroupCollection(self) self._logs = logs.SqlaLogCollection(self) self._nodes = nodes.SqlaNodeCollection(self) - self._schema_manager = SqlaBackendManager() self._users = users.SqlaUserCollection(self) - def migrate(self): - self._schema_manager.migrate() + @property + def is_closed(self) -> bool: + return self._session_factory is None + + def __str__(self) -> str: + repo_uri = self.profile.storage_config['repository_uri'] + state = 'closed' if self.is_closed else 'open' + return f'Storage for {self.profile.name!r} [{state}] @ {self._db_url!r} / {repo_uri}' + + def _initialise_session(self): + """Initialise the SQLAlchemy session factory. + + Only one session factory is ever associated with a given class instance, + i.e. once the instance is closed, it cannot be reopened. + + The session factory, returns a session that is bound to the current thread. + Multi-thread support is currently required by the REST API. + Although, in the future, we may want to move the multi-thread handling to higher in the AiiDA stack. 
+ """ + from aiida.backends.sqlalchemy.utils import create_sqlalchemy_engine + engine = create_sqlalchemy_engine(self._profile.storage_config) + self._session_factory = scoped_session(sessionmaker(bind=engine, future=True, expire_on_commit=True)) + + def get_session(self) -> Session: + """Return an SQLAlchemy session bound to the current thread.""" + if self._session_factory is None: + raise ClosedStorage(str(self)) + return self._session_factory() + + def close(self) -> None: + if self._session_factory is None: + return # the instance is already closed, and so this is a no-op + # reset the cached default user instance, since it will now have no associated session + User.objects(self).reset() + # close the connection + # pylint: disable=no-member + engine = self._session_factory.bind + if engine is not None: + engine.dispose() # type: ignore + self._session_factory.expunge_all() + self._session_factory.close() + self._session_factory = None + + def _clear(self, recreate_user: bool = True) -> None: + from aiida.backends.sqlalchemy.models.settings import DbSetting + from aiida.backends.sqlalchemy.models.user import DbUser + + super()._clear(recreate_user) + + session = self.get_session() + + # clear the database + with self.transaction(): + + # save the default user + default_user_kwargs = None + if recreate_user: + default_user = User.objects(self).get_default() + if default_user is not None: + default_user_kwargs = { + 'email': default_user.email, + 'first_name': default_user.first_name, + 'last_name': default_user.last_name, + 'institution': default_user.institution, + } + + # now clear the database + for table_name in ( + 'db_dbgroup_dbnodes', 'db_dbgroup', 'db_dblink', 'db_dbnode', 'db_dblog', 'db_dbauthinfo', 'db_dbuser', + 'db_dbcomputer' + ): + session.execute(table(table_name).delete()) + session.expunge_all() + + # restore the default user + if recreate_user and default_user_kwargs: + session.add(DbUser(**default_user_kwargs)) + # clear aiida's cache of the default user + User.objects(self).reset() + + # Clear the repository and reset the repository UUID + container = Container(self.profile.repository_path / 'container') + container.init_container(clear=True, **CONTAINER_DEFAULTS) + container_id = container.container_id + with self.transaction(): + session.execute( + DbSetting.__table__.update().where(DbSetting.key == REPOSITORY_UUID_KEY).values(val=container_id) + ) + + def get_repository(self) -> 'DiskObjectStoreRepositoryBackend': + from aiida.repository.backend import DiskObjectStoreRepositoryBackend + + container = Container(self.profile.repository_path / 'container') + return DiskObjectStoreRepositoryBackend(container=container) @property def authinfos(self): @@ -74,15 +207,6 @@ def query(self): def users(self): return self._users - @staticmethod - def get_session() -> Session: - """Return a database session that can be used by the `QueryBuilder` to perform its query. - - :return: an instance of :class:`sqlalchemy.orm.session.Session` - """ - from aiida.backends.sqlalchemy import get_scoped_session - return get_scoped_session() - @contextmanager def transaction(self) -> Iterator[Session]: """Open a transaction to be used as a context manager. 
@@ -190,39 +314,36 @@ def delete_nodes_and_connections(self, pks_to_delete: Sequence[int]) -> None: # # Delete the actual nodes session.query(DbNode).filter(DbNode.id.in_(list(pks_to_delete))).delete(synchronize_session='fetch') - # Below are abstract methods inherited from `aiida.orm.implementation.sql.backends.SqlBackend` + def get_backend_entity(self, model: base.Base) -> BackendEntity: + """ + Return the backend entity that corresponds to the given Model instance - def get_backend_entity(self, model): + :param model: the ORM model instance to promote to a backend instance + :return: the backend entity corresponding to the given model + """ return convert.get_backend_entity(model, self) - @contextmanager - def cursor(self): - from aiida.backends import sqlalchemy as sa - try: - connection = sa.ENGINE.raw_connection() - yield connection.cursor() - finally: - self._get_connection().close() - - def execute_raw(self, query): - from sqlalchemy import text - from sqlalchemy.exc import ResourceClosedError # pylint: disable=import-error,no-name-in-module - - with self.transaction() as session: - queryset = session.execute(text(query)) + def set_global_variable( + self, key: str, value: Union[None, str, int, float], description: Optional[str] = None, overwrite=True + ) -> None: + from aiida.backends.sqlalchemy.models.settings import DbSetting - try: - results = queryset.fetchall() - except ResourceClosedError: - return None + session = self.get_session() + with (nullcontext() if self.in_transaction else self.transaction()): + if session.query(DbSetting).filter(DbSetting.key == key).count(): + if overwrite: + session.query(DbSetting).filter(DbSetting.key == key).update(dict(val=value)) + else: + raise ValueError(f'The setting {key} already exists') + else: + session.add(DbSetting(key=key, val=value, description=description or '')) - return results + def get_global_variable(self, key: str) -> Union[None, str, int, float]: + from aiida.backends.sqlalchemy.models.settings import DbSetting - @staticmethod - def _get_connection(): - """Get the SQLA database connection - - :return: the SQLA database connection - """ - from aiida.backends import sqlalchemy as sa - return sa.ENGINE.raw_connection() + session = self.get_session() + with (nullcontext() if self.in_transaction else self.transaction()): + setting = session.query(DbSetting).filter(DbSetting.key == key).one_or_none() + if setting is None: + raise KeyError(f'No setting found with key {key}') + return setting.val diff --git a/aiida/orm/implementation/sqlalchemy/comments.py b/aiida/orm/implementation/sqlalchemy/comments.py index 19c2b18c4c..5f977526d9 100644 --- a/aiida/orm/implementation/sqlalchemy/comments.py +++ b/aiida/orm/implementation/sqlalchemy/comments.py @@ -55,7 +55,7 @@ def __init__(self, backend, node, user, content=None, ctime=None, mtime=None): lang.type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime - self._dbmodel = utils.ModelWrapper(models.DbComment(**arguments)) + self._dbmodel = utils.ModelWrapper(models.DbComment(**arguments), backend) def store(self): """Can only store if both the node and user are stored as well.""" @@ -131,7 +131,8 @@ def delete(self, comment_id): session = self.backend.get_session() try: - session.query(models.DbComment).filter_by(id=comment_id).one().delete() + row = session.query(models.DbComment).filter_by(id=comment_id).one() + session.delete(row) session.commit() except NoResultFound: session.rollback() diff --git 
a/aiida/orm/implementation/sqlalchemy/computers.py b/aiida/orm/implementation/sqlalchemy/computers.py index 1744b428fd..020cfc11dc 100644 --- a/aiida/orm/implementation/sqlalchemy/computers.py +++ b/aiida/orm/implementation/sqlalchemy/computers.py @@ -31,7 +31,7 @@ class SqlaComputer(entities.SqlaModelEntity[DbComputer], BackendComputer): def __init__(self, backend, **kwargs): super().__init__(backend) - self._dbmodel = utils.ModelWrapper(DbComputer(**kwargs)) + self._dbmodel = utils.ModelWrapper(DbComputer(**kwargs), backend) @property def uuid(self): @@ -125,7 +125,8 @@ def list_names(self): def delete(self, pk): try: session = self.backend.get_session() - session.get(DbComputer, pk).delete() + row = session.get(DbComputer, pk) + session.delete(row) session.commit() except SQLAlchemyError as exc: raise exceptions.InvalidOperation( diff --git a/aiida/orm/implementation/sqlalchemy/entities.py b/aiida/orm/implementation/sqlalchemy/entities.py index 78a4898ffc..4560e40d85 100644 --- a/aiida/orm/implementation/sqlalchemy/entities.py +++ b/aiida/orm/implementation/sqlalchemy/entities.py @@ -38,13 +38,13 @@ def from_dbmodel(cls, dbmodel, backend): :param backend: the corresponding backend :return: the Django entity """ - from .backend import SqlaBackend # pylint: disable=cyclic-import + from .backend import PsqlDosBackend # pylint: disable=cyclic-import cls._class_check() type_check(dbmodel, cls.MODEL_CLASS) - type_check(backend, SqlaBackend) + type_check(backend, PsqlDosBackend) entity = cls.__new__(cls) super(SqlaModelEntity, entity).__init__(backend) - entity._dbmodel = utils.ModelWrapper(dbmodel) # pylint: disable=protected-access + entity._dbmodel = utils.ModelWrapper(dbmodel, backend) # pylint: disable=protected-access return entity @classmethod diff --git a/aiida/orm/implementation/sql/extras.py b/aiida/orm/implementation/sqlalchemy/extras_mixin.py similarity index 99% rename from aiida/orm/implementation/sql/extras.py rename to aiida/orm/implementation/sqlalchemy/extras_mixin.py index 353d949a00..241dcc39c1 100644 --- a/aiida/orm/implementation/sql/extras.py +++ b/aiida/orm/implementation/sqlalchemy/extras_mixin.py @@ -14,7 +14,7 @@ from aiida.orm.implementation.utils import clean_value, validate_attribute_extra_key -class SqlExtrasMixin: +class ExtrasMixin: """Mixin class for SQL implementations of ``extras``.""" _dbmodel: Any diff --git a/aiida/orm/implementation/sqlalchemy/groups.py b/aiida/orm/implementation/sqlalchemy/groups.py index 5441cd2f50..9dd012b108 100644 --- a/aiida/orm/implementation/sqlalchemy/groups.py +++ b/aiida/orm/implementation/sqlalchemy/groups.py @@ -14,9 +14,9 @@ from aiida.common.exceptions import UniquenessError from aiida.common.lang import type_check from aiida.orm.implementation.groups import BackendGroup, BackendGroupCollection -from aiida.orm.implementation.sql.extras import SqlExtrasMixin from . 
import entities, users, utils +from .extras_mixin import ExtrasMixin __all__ = ('SqlaGroup', 'SqlaGroupCollection') @@ -25,7 +25,7 @@ # Unfortunately the linter doesn't seem to be able to pick up on the fact that the abstract property 'id' # of BackendGroup is actually implemented in SqlaModelEntity so disable the abstract check -class SqlaGroup(entities.SqlaModelEntity[DbGroup], SqlExtrasMixin, BackendGroup): # pylint: disable=abstract-method +class SqlaGroup(entities.SqlaModelEntity[DbGroup], ExtrasMixin, BackendGroup): # pylint: disable=abstract-method """The SQLAlchemy Group object""" MODEL_CLASS = DbGroup @@ -44,7 +44,7 @@ def __init__(self, backend, label, user, description='', type_string=''): super().__init__(backend) dbgroup = DbGroup(label=label, description=description, user=user.dbmodel, type_string=type_string) - self._dbmodel = utils.ModelWrapper(dbgroup) + self._dbmodel = utils.ModelWrapper(dbgroup, backend) @property def label(self): @@ -283,5 +283,6 @@ class SqlaGroupCollection(BackendGroupCollection): def delete(self, id): # pylint: disable=redefined-builtin session = self.backend.get_session() - session.get(DbGroup, id).delete() + row = session.get(DbGroup, id) + session.delete(row) session.commit() diff --git a/aiida/orm/implementation/sqlalchemy/logs.py b/aiida/orm/implementation/sqlalchemy/logs.py index 8abc1c8e53..c9d71bf23d 100644 --- a/aiida/orm/implementation/sqlalchemy/logs.py +++ b/aiida/orm/implementation/sqlalchemy/logs.py @@ -35,7 +35,7 @@ def __init__(self, backend, time, loggername, levelname, dbnode_id, message='', dbnode_id=dbnode_id, message=message, metadata=metadata - ) + ), backend ) @property @@ -109,7 +109,8 @@ def delete(self, log_id): session = self.backend.get_session() try: - session.query(models.DbLog).filter_by(id=log_id).one().delete() + row = session.query(models.DbLog).filter_by(id=log_id).one() + session.delete(row) session.commit() except NoResultFound: session.rollback() diff --git a/aiida/orm/implementation/sqlalchemy/nodes.py b/aiida/orm/implementation/sqlalchemy/nodes.py index cb5936ebbc..868b36bef7 100644 --- a/aiida/orm/implementation/sqlalchemy/nodes.py +++ b/aiida/orm/implementation/sqlalchemy/nodes.py @@ -18,17 +18,17 @@ from aiida.backends.sqlalchemy.models import node as models from aiida.common import exceptions from aiida.common.lang import type_check -from aiida.orm.implementation.sql.extras import SqlExtrasMixin from aiida.orm.implementation.utils import clean_value, validate_attribute_extra_key from . import entities from . import utils as sqla_utils from .. import BackendNode, BackendNodeCollection from .computers import SqlaComputer +from .extras_mixin import ExtrasMixin from .users import SqlaUser -class SqlaNode(entities.SqlaModelEntity[models.DbNode], SqlExtrasMixin, BackendNode): +class SqlaNode(entities.SqlaModelEntity[models.DbNode], ExtrasMixin, BackendNode): """SQLA Node backend entity""" # pylint: disable=too-many-public-methods @@ -83,7 +83,7 @@ def __init__( type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime - self._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments)) + self._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments), backend) def clone(self): """Return an unstored clone of ourselves. 
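# ---------------------------------------------------------------------------
# Editor's aside (not part of the patch): the recurring change in these
# collection classes replaces `query(...).one().delete()` (a helper on the old
# model base, bound to the global scoped session that this patch removes) with
# fetching the row and passing it to `Session.delete()`, so the deletion goes
# through the backend's own session. A runnable sketch of the new pattern with
# a hypothetical mapped class:
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class DbThing(Base):  # hypothetical stand-in for DbComment/DbLog/DbNode
    __tablename__ = 'db_thing'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://')  # in-memory database for illustration
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(DbThing(id=1))
    session.commit()
    row = session.query(DbThing).filter_by(id=1).one()  # fetch the ORM instance
    session.delete(row)                                 # mark it for deletion
    session.commit()                                    # emit the DELETE
# ---------------------------------------------------------------------------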
@@ -103,7 +103,7 @@ def clone(self): clone = self.__class__.__new__(self.__class__) # pylint: disable=no-value-for-parameter clone.__init__(self.backend, self.node_type, self.user) - clone._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments)) # pylint: disable=protected-access + clone._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments), self.backend) # pylint: disable=protected-access return clone @property @@ -322,7 +322,8 @@ def delete(self, pk): session = self.backend.get_session() try: - session.query(models.DbNode).filter_by(id=pk).one().delete() + row = session.query(models.DbNode).filter_by(id=pk).one() + session.delete(row) session.commit() except NoResultFound: raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from NoResultFound diff --git a/aiida/orm/implementation/sqlalchemy/users.py b/aiida/orm/implementation/sqlalchemy/users.py index a4d2841a77..b941928a61 100644 --- a/aiida/orm/implementation/sqlalchemy/users.py +++ b/aiida/orm/implementation/sqlalchemy/users.py @@ -25,7 +25,7 @@ def __init__(self, backend, email, first_name, last_name, institution): # pylint: disable=too-many-arguments super().__init__(backend) self._dbmodel = utils.ModelWrapper( - DbUser(email=email, first_name=first_name, last_name=last_name, institution=institution) + DbUser(email=email, first_name=first_name, last_name=last_name, institution=institution), backend ) @property diff --git a/aiida/orm/implementation/sqlalchemy/utils.py b/aiida/orm/implementation/sqlalchemy/utils.py index 42607c31c4..2a4ce7b9e5 100644 --- a/aiida/orm/implementation/sqlalchemy/utils.py +++ b/aiida/orm/implementation/sqlalchemy/utils.py @@ -8,43 +8,63 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Utilities for the implementation of the SqlAlchemy backend.""" - import contextlib +from typing import TYPE_CHECKING # pylint: disable=import-error,no-name-in-module from sqlalchemy import inspect from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session from sqlalchemy.orm.attributes import flag_modified -from aiida.backends.sqlalchemy import get_scoped_session from aiida.common import exceptions +if TYPE_CHECKING: + from aiida.orm.implementation.sqlalchemy.backend import PsqlDosBackend + IMMUTABLE_MODEL_FIELDS = {'id', 'pk', 'uuid', 'node_type'} class ModelWrapper: - """Wrap a database model instance to correctly update and flush the data model when getting or setting a field. + """Wrap an SQLA ORM model and AiiDA storage backend instance together, + to correctly update and flush the data model when getting or setting a field. + + The ORM model represents a row in a database table, with a given schema, + and its attributes represent the fields (a.k.a. columns) of the table. + When an ORM model instance is created, it does not have any association with a particular database, + i.e. it is "unsaved". + At this point, its attributes can be freely retrieved or set. - If the model is not stored, the behavior of the get and set attributes is unaltered. However, if the model is - stored, which is to say, it has a primary key, the `getattr` and `setattr` are modified as follows: + When the ORM model instance is saved, it is associated with the database configured for the backend instance, + by adding it to the backend instance's session (i.e. its connection with the database).
+ At this point: + + - Whenever we retrieve a field of the model instance, unless we know it to be immutable, + we first ensure that the field represents the latest value in the database + (e.g. in case the database has been externally updated). + + - Whenever we set a field of the model instance, unless we know it to be immutable, + we flush the change to the database. - * `getattr`: if the item corresponds to a mutable model field, the model instance is refreshed first - * `setattr`: if the item corresponds to a mutable model field, changes are flushed after performing the change """ # pylint: disable=too-many-instance-attributes - def __init__(self, model, auto_flush=()): + def __init__(self, model, backend: 'PsqlDosBackend'): """Construct the ModelWrapper. - :param model: the database model instance to wrap - :param auto_flush: an optional tuple of database model fields that are always to be flushed, in addition to - the field that corresponds to the attribute being set through `__setattr__`. + :param model: the ORM model instance to wrap + :param backend: the storage backend instance """ super().__init__() # Have to do it this way because we overwrite __setattr__ object.__setattr__(self, '_model', model) - object.__setattr__(self, '_auto_flush', auto_flush) + object.__setattr__(self, '_backend', backend) + + @property + def session(self) -> Session: + """Return the session of the storage backend instance.""" + return self._backend.get_session() def __getattr__(self, item): """Get an attribute of the model instance. @@ -57,8 +77,8 @@ def __getattr__(self, item): """ # Python 3's implementation of copy.copy does not call __init__ on the new object # but manually restores attributes instead. Make sure we never get into a recursive - # loop by protecting the only special variable here: _model - if item == '_model': + # loop by protecting the special variables here + if item in ('_model', '_backend'): raise AttributeError() if self.is_saved() and self._is_mutable_model_field(item) and not self._in_transaction(): @@ -76,7 +96,7 @@ def __setattr__(self, key, value): """ setattr(self._model, key, value) if self.is_saved() and self._is_mutable_model_field(key): - fields = set((key,) + self._auto_flush) + fields = set((key,)) self._flush(fields=fields) def is_saved(self): @@ -86,7 +106,7 @@ def is_saved(self): """ # we should not flush here since it may lead to IntegrityErrors # which are handled later in the save method - with self._model.session.no_autoflush: + with self.session.no_autoflush: return self._model.id is not None def save(self): @@ -97,10 +117,11 @@ def save(self): :raises `aiida.common.IntegrityError`: if a database integrity error is raised during the save. """ try: - commit = not self._in_transaction() - self._model.save(commit=commit) + self.session.add(self._model) + if not self._in_transaction(): + self.session.commit() except IntegrityError as exception: - self._model.session.rollback() + self.session.rollback() raise exceptions.IntegrityError(str(exception)) def _is_mutable_model_field(self, field): @@ -138,15 +159,14 @@ def _ensure_model_uptodate(self, fields=None): :param fields: optionally refresh only these fields, if `None` all fields are refreshed. """ - self._model.session.expire(self._model, attribute_names=fields) + self.session.expire(self._model, attribute_names=fields) - @staticmethod - def _in_transaction(): + def _in_transaction(self): """Return whether the current scope is within an open database transaction. 
:return: boolean, True if currently in open transaction, False otherwise. """ - return get_scoped_session().in_nested_transaction() + return self.session.in_nested_transaction() @contextlib.contextmanager diff --git a/aiida/orm/logs.py b/aiida/orm/logs.py index 4975559ec4..885ffb7bb1 100644 --- a/aiida/orm/logs.py +++ b/aiida/orm/logs.py @@ -14,7 +14,7 @@ from aiida.common import timezone from aiida.common.lang import classproperty -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from . import entities @@ -132,7 +132,7 @@ class Log(entities.Entity['BackendLog']): @classproperty def objects(cls: Type['Log']) -> LogCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return LogCollection.get_cached(cls, get_manager().get_backend()) + return LogCollection.get_cached(cls, get_manager().get_profile_storage()) def __init__( self, @@ -162,7 +162,7 @@ def __init__( if not loggername or not levelname: raise exceptions.ValidationError('The loggername and levelname cannot be empty') - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() model = backend.logs.create( time=time, loggername=loggername, diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index f5024fcb8f..4b416e1b05 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -35,8 +35,7 @@ from aiida.common.hashing import make_hash from aiida.common.lang import classproperty, type_check from aiida.common.links import LinkType -from aiida.manage.manager import get_manager -from aiida.orm import autogroup +from aiida.manage import get_manager from aiida.orm.utils.links import LinkManager, LinkTriple from aiida.orm.utils.node import AbstractNodeMeta @@ -154,7 +153,7 @@ class Node( @classproperty def objects(cls: Type[NodeType]) -> NodeCollection[NodeType]: # pylint: disable=no-self-argument - return NodeCollection.get_cached(cls, get_manager().get_backend()) # type: ignore[arg-type] + return NodeCollection.get_cached(cls, get_manager().get_profile_storage()) # type: ignore[arg-type] def __init__( self, @@ -163,7 +162,7 @@ def __init__( computer: Optional[Computer] = None, **kwargs: Any ) -> None: - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() if computer and not computer.is_stored: raise ValueError('the computer is not stored') @@ -716,9 +715,8 @@ def store(self, with_transaction: bool = True) -> 'Node': # pylint: disable=arg else: self._store(with_transaction=with_transaction, clean=True) - # Set up autogrouping used by verdi run - if autogroup.CURRENT_AUTOGROUP is not None and autogroup.CURRENT_AUTOGROUP.is_to_be_grouped(self): - group = autogroup.CURRENT_AUTOGROUP.get_or_create_group() + if self.backend.autogroup.is_to_be_grouped(self): + group = self.backend.autogroup.get_or_create_group() group.add_nodes(self) return self diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index 94bdce9cde..fcdf6a5fbd 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -38,7 +38,7 @@ ) import warnings -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm.entities import EntityTypes from aiida.orm.implementation.querybuilder import ( GROUP_ENTITY_TYPE_PREFIX, @@ -136,7 +136,7 @@ def __init__( :param distinct: Whether to return de-duplicated rows """ - self._backend = backend or get_manager().get_backend() + self._backend = backend or get_manager().get_profile_storage() self._impl: 
BackendQueryBuilder = self._backend.query() # SERIALISABLE ATTRIBUTES diff --git a/aiida/orm/users.py b/aiida/orm/users.py index d59007dc70..abc56e2e19 100644 --- a/aiida/orm/users.py +++ b/aiida/orm/users.py @@ -8,11 +8,11 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Module for the ORM user class.""" -from typing import TYPE_CHECKING, Optional, Tuple, Type, Union, cast +from typing import TYPE_CHECKING, Optional, Tuple, Type from aiida.common import exceptions from aiida.common.lang import classproperty -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from . import entities @@ -25,16 +25,13 @@ class UserCollection(entities.Collection['User']): """The collection of users stored in a backend.""" - UNDEFINED = 'UNDEFINED' - _default_user: Union[None, str, 'User'] = None - @staticmethod def _entity_base_cls() -> Type['User']: return User def __init__(self, entity_class: Type['User'], backend: Optional['Backend'] = None) -> None: super().__init__(entity_class=entity_class, backend=backend) - self._default_user = self.UNDEFINED + self._default_user: Optional[User] = None def get_or_create(self, email: str, **kwargs) -> Tuple[bool, 'User']: """Get the existing user with a given email address or create an unstored one @@ -51,10 +48,8 @@ def get_or_create(self, email: str, **kwargs) -> Tuple[bool, 'User']: def get_default(self) -> Optional['User']: """Get the current default user""" - if self._default_user is self.UNDEFINED: - from aiida.manage.configuration import get_profile - profile = get_profile() - email = profile.default_user_email + if self._default_user is None: + email = self.backend.profile.default_user_email if not email: self._default_user = None @@ -63,13 +58,13 @@ def get_default(self) -> Optional['User']: except (exceptions.MultipleObjectsError, exceptions.NotExistent): self._default_user = None - return cast(Optional['User'], self._default_user) + return self._default_user def reset(self) -> None: """ Reset internal caches (default user). """ - self._default_user = self.UNDEFINED + self._default_user = None class User(entities.Entity['BackendUser']): @@ -79,7 +74,7 @@ class User(entities.Entity['BackendUser']): @classproperty def objects(cls: Type['User']) -> UserCollection: # type: ignore[misc] # pylint: disable=no-self-argument - return UserCollection.get_cached(cls, get_manager().get_backend()) + return UserCollection.get_cached(cls, get_manager().get_profile_storage()) def __init__( self, @@ -91,7 +86,7 @@ def __init__( ): """Create a new `User`.""" # pylint: disable=too-many-arguments - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() email = self.normalize_email(email) backend_entity = backend.users.create(email, first_name, last_name, institution) super().__init__(backend_entity) diff --git a/aiida/orm/utils/log.py b/aiida/orm/utils/log.py index acf0a478c1..f4590d0ccd 100644 --- a/aiida/orm/utils/log.py +++ b/aiida/orm/utils/log.py @@ -21,8 +21,6 @@ def emit(self, record): # https://github.com/python/cpython/blob/1c2cb516e49ceb56f76e90645e67e8df4e5df01a/Lib/logging/handlers.py#L590 self.format(record) - from django.core.exceptions import ImproperlyConfigured # pylint: disable=no-name-in-module, import-error - from aiida import orm try: @@ -33,11 +31,6 @@ def emit(self, record): # The backend should be set. 
We silently absorb this error pass - except ImproperlyConfigured: - # Probably, the logger was called without the - # Django settings module loaded. Then, - # This ignore should be a no-op. - pass except Exception: # pylint: disable=broad-except # To avoid loops with the error handler, I just print. # Hopefully, though, this should not happen! diff --git a/aiida/restapi/common/utils.py b/aiida/restapi/common/utils.py index ddcd19a4fe..bfbe73c568 100644 --- a/aiida/restapi/common/utils.py +++ b/aiida/restapi/common/utils.py @@ -17,7 +17,7 @@ from aiida.common.exceptions import InputValidationError, ValidationError from aiida.common.utils import DatetimePrecision -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.restapi.common.exceptions import RestInputValidationError, RestValidationError # Important to match querybuilder keys @@ -820,14 +820,16 @@ def list_routes(): @decorator -def close_session(wrapped, _, args, kwargs): - """Close AiiDA SQLAlchemy (QueryBuilder) session +def close_thread_connection(wrapped, _, args, kwargs): + """Close the profile's storage connection, for the current thread. - This decorator can be used for router endpoints to close the SQLAlchemy global scoped session after the response - has been created. This is needed, since the QueryBuilder uses a SQLAlchemy global scoped session no matter the - profile's database backend. + This decorator can be used for router endpoints. + It is needed due to the server running in threaded mode, i.e., creating a new thread for each incoming request, + and leaving connections unreleased. + + Note, this is currently hard-coded to the `PsqlDosBackend` storage backend. """ try: return wrapped(*args, **kwargs) finally: - get_manager().get_backend().get_session().close() + get_manager().get_profile_storage().get_session().close() diff --git a/aiida/restapi/resources.py b/aiida/restapi/resources.py index 99d9af5493..20da71b9ba 100644 --- a/aiida/restapi/resources.py +++ b/aiida/restapi/resources.py @@ -15,7 +15,7 @@ from aiida.common.lang import classproperty from aiida.restapi.common.exceptions import RestInputValidationError -from aiida.restapi.common.utils import Utils, close_session +from aiida.restapi.common.utils import Utils, close_thread_connection class ServerInfo(Resource): @@ -98,7 +98,7 @@ class BaseResource(Resource): _translator_class = BaseTranslator _parse_pk_uuid = None # Flag to tell the path parser whether to expect a pk or a uuid pattern - method_decorators = [close_session] # Close SQLA session after any method call + method_decorators = [close_thread_connection] # Close the thread's storage connection after any method call ## TODO add the caching support. 
I cache total count, results, and possibly diff --git a/aiida/restapi/translator/nodes/node.py b/aiida/restapi/translator/nodes/node.py index 886208b38b..2a38586afe 100644 --- a/aiida/restapi/translator/nodes/node.py +++ b/aiida/restapi/translator/nodes/node.py @@ -24,7 +24,7 @@ LoadingEntryPointError, ValidationError, ) -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import Data, Node from aiida.plugins.entry_point import get_entry_point_names, load_entry_point from aiida.restapi.common.exceptions import RestFeatureNotAvailable, RestInputValidationError, RestValidationError @@ -93,7 +93,7 @@ def __init__(self, **kwargs): """ self._subclasses = self._get_subclasses() - self._backend = get_manager().get_backend() + self._backend = get_manager().get_profile_storage() def set_query_type( self, diff --git a/aiida/tools/archive/abstract.py b/aiida/tools/archive/abstract.py index 53281afa5e..8192ef3e16 100644 --- a/aiida/tools/archive/abstract.py +++ b/aiida/tools/archive/abstract.py @@ -56,6 +56,7 @@ def __init__( raise ValueError(f'compression not in range 0-9: {compression}') self._compression = compression self._format = fmt + self._kwargs = kwargs @property def path(self) -> Path: @@ -123,7 +124,7 @@ def delete_object(self, key: str) -> None: class ArchiveReaderAbstract(ABC): """Reader of an archive, that will be used as a context manager.""" - def __init__(self, path: Union[str, Path], **kwargs: Any): + def __init__(self, path: Union[str, Path], **kwargs: Any): # pylint: disable=unused-argument """Initialise the reader. :param path: archive path diff --git a/aiida/tools/archive/create.py b/aiida/tools/archive/create.py index 8129297bed..34166badb8 100644 --- a/aiida/tools/archive/create.py +++ b/aiida/tools/archive/create.py @@ -27,7 +27,7 @@ from aiida.common.links import GraphTraversalRules from aiida.common.log import AIIDA_LOGGER from aiida.common.progress_reporter import get_progress_reporter -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm.entities import EntityTypes from aiida.orm.implementation import Backend from aiida.orm.utils.links import LinkQuadruple @@ -150,7 +150,7 @@ def create_archive( """ # check the backend - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() type_check(backend, Backend) # create a function to get a query builder instance for the backend querybuilder = lambda: orm.QueryBuilder(backend=backend) diff --git a/aiida/tools/archive/implementations/sqlite/backend.py b/aiida/tools/archive/implementations/sqlite/backend.py index fa438f8ab1..75f8fd6159 100644 --- a/aiida/tools/archive/implementations/sqlite/backend.py +++ b/aiida/tools/archive/implementations/sqlite/backend.py @@ -12,10 +12,12 @@ from datetime import datetime from functools import singledispatch from pathlib import Path +import tempfile from typing import BinaryIO, Iterable, Iterator, List, Optional, Sequence, Tuple, Type, cast import zipfile from zipfile import ZipFile +from archive_path import extract_file_in_zip import pytz from sqlalchemy import CHAR, Text, orm, types from sqlalchemy.dialects.postgresql import JSONB, UUID @@ -24,14 +26,16 @@ # we need to import all models, to ensure they are loaded on the SQLA Metadata from aiida.backends.sqlalchemy.models import authinfo, base, comment, computer, group, log, node, user +from aiida.common.exceptions import UnreachableStorage +from aiida.manage import Profile from aiida.orm.entities 
import EntityTypes
 from aiida.orm.implementation.backends import Backend as BackendAbstract
 from aiida.orm.implementation.sqlalchemy import authinfos, comments, computers, entities, groups, logs, nodes, users
 from aiida.orm.implementation.sqlalchemy.querybuilder import SqlaQueryBuilder
 from aiida.repository.backend.abstract import AbstractRepositoryBackend
-from aiida.tools.archive.exceptions import ArchiveClosedError, ReadOnlyError
+from aiida.tools.archive.exceptions import ArchiveClosedError, CorruptArchive, ReadOnlyError

-from .common import REPO_FOLDER
+from .common import DB_FILENAME, REPO_FOLDER, create_sqla_engine


 class SqliteModel:
@@ -240,30 +244,67 @@ def table_groups_nodes(self):
         return DbGroupNodes.__table__  # type: ignore[attr-defined] # pylint: disable=no-member


-class ArchiveReadOnlyBackend(BackendAbstract):
+class ArchiveReadOnlyBackend(BackendAbstract):  # pylint: disable=too-many-public-methods
     """A read-only backend for the archive."""

-    def __init__(self, path: Path, session: orm.Session):
-        super().__init__()
-        self._path = path
-        self._session: Optional[orm.Session] = session
-        # lazy open the archive zipfile
+    @classmethod
+    def version_head(cls) -> str:
+        raise NotImplementedError
+
+    @classmethod
+    def version_profile(cls, profile: Profile) -> None:
+        raise NotImplementedError
+
+    @classmethod
+    def migrate(cls, profile: Profile):
+        raise ReadOnlyError()
+
+    def __init__(self, profile: Profile):
+        super().__init__(profile)
+        self._path = Path(profile.storage_config['path'])
+        if not self._path.is_file():
+            raise UnreachableStorage(f'archive file `{self._path}` does not exist.')
+        # lazy open the archive zipfile and extract the database file
+        self._db_file: Optional[Path] = None
+        self._session: Optional[orm.Session] = None
         self._zipfile: Optional[zipfile.ZipFile] = None
         self._closed = False

+    def __str__(self) -> str:
+        state = 'closed' if self.is_closed else 'open'
+        return f'AiiDA archive (read-only) [{state}] @ {self._path}'
+
+    @property
+    def is_closed(self) -> bool:
+        return self._closed
+
     def close(self):
         """Close the backend"""
         if self._session:
             self._session.close()
+        if self._db_file and self._db_file.exists():
+            self._db_file.unlink()
         if self._zipfile:
             self._zipfile.close()
         self._session = None
+        self._db_file = None
         self._zipfile = None
         self._closed = True

     def get_session(self) -> orm.Session:
-        if not self._session:
+        """Return an SQLAlchemy session."""
+        if self._closed:
             raise ArchiveClosedError()
+        if self._db_file is None:
+            _, path = tempfile.mkstemp()
+            self._db_file = Path(path)
+            with self._db_file.open('wb') as handle:
+                try:
+                    extract_file_in_zip(self._path, DB_FILENAME, handle, search_limit=4)
+                except Exception as exc:
+                    raise CorruptArchive(f'database could not be read: {exc}') from exc
+        if self._session is None:
+            self._session = orm.Session(create_sqla_engine(self._db_file))
         return self._session

     def get_repository(self) -> ZipfileBackendRepository:
@@ -309,7 +350,7 @@ def nodes(self):
     def users(self):
         return create_backend_collection(users.SqlaUserCollection, self, users.SqlaUser, DbUser)

-    def migrate(self):
+    def _clear(self, recreate_user: bool = True) -> None:
         raise ReadOnlyError()

     def transaction(self):
@@ -328,6 +369,12 @@ def bulk_update(self, entity_type: EntityTypes, rows: List[dict]) -> None:
     def delete_nodes_and_connections(self, pks_to_delete: Sequence[int]):
         raise ReadOnlyError()

+    def get_global_variable(self, key: str):
+        raise NotImplementedError
+
+    def set_global_variable(self, key: str, value, description:
Optional[str] = None, overwrite=True) -> None: + raise ReadOnlyError() + def create_backend_cls(base_class, model_cls): """Create an archive backend class for the given model class.""" @@ -339,16 +386,13 @@ class ReadOnlyEntityBackend(base_class): # type: ignore def __init__(self, _backend, model): """Initialise the backend entity.""" + from aiida.orm.implementation.sqlalchemy.utils import ModelWrapper self._backend = _backend - # In the SQLA base classes, the SQLA model instance is wrapped in a proxy class, - # to handle attributes get/set on stored/unstored instances, and saving instances to the database. - # However, since the wrapper is currently tied to the global session (see #5172) - # and this is a read-only archive, we don't need to do that. - self._dbmodel = model + self._dbmodel = ModelWrapper(model, _backend) @property def dbmodel(self): - return self._dbmodel + return self._dbmodel._model # pylint: disable=protected-access @classmethod def from_dbmodel(cls, model, _backend): diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py b/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py index 6229ac9afb..aa065f180b 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py +++ b/aiida/tools/archive/implementations/sqlite/migrations/legacy/v05_to_v06.py @@ -98,7 +98,7 @@ def migration_migrate_legacy_job_calculation_data(data): `process_status`. These are inferred from the old `state` attribute, which is then discarded as its values have been deprecated. """ - from aiida.backends.general.migrations.calc_state import STATE_MAPPING + from aiida.backends.sqlalchemy.migrations.utils.calc_state import STATE_MAPPING calc_job_node_type = 'process.calculation.calcjob.CalcJobNode.' node_data = data['export_data'].get('Node', {}) diff --git a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py b/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py index 56bdd93816..4479b9f4ea 100644 --- a/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py +++ b/aiida/tools/archive/implementations/sqlite/migrations/legacy/v06_to_v07.py @@ -52,7 +52,7 @@ def data_migration_legacy_process_attributes(data): the ProcessNode is in an active state, i.e. `process_state` is one of ('created', 'running', 'waiting'). A log-file, listing all illegal ProcessNodes, will be produced in the current directory. 
""" - from aiida.manage.database.integrity import write_database_integrity_violation + from aiida.backends.sqlalchemy.migrations.utils.integrity import write_database_integrity_violation from aiida.tools.archive.exceptions import CorruptArchive attrs_to_remove = ['_sealed', '_finished', '_failed', '_aborted', '_do_abort'] diff --git a/aiida/tools/archive/implementations/sqlite/reader.py b/aiida/tools/archive/implementations/sqlite/reader.py index 169312316d..f3cdebbe74 100644 --- a/aiida/tools/archive/implementations/sqlite/reader.py +++ b/aiida/tools/archive/implementations/sqlite/reader.py @@ -10,20 +10,18 @@ """AiiDA archive reader implementation.""" import json from pathlib import Path -import shutil import tarfile -import tempfile from typing import Any, Dict, Optional, Union import zipfile -from archive_path import extract_file_in_zip, read_file_in_tar, read_file_in_zip -from sqlalchemy import orm +from archive_path import read_file_in_tar, read_file_in_zip +from aiida.manage import Profile from aiida.tools.archive.abstract import ArchiveReaderAbstract from aiida.tools.archive.exceptions import CorruptArchive, UnreadableArchiveError from . import backend as db -from .common import DB_FILENAME, META_FILENAME, create_sqla_engine +from .common import META_FILENAME class ArchiveReaderSqlZip(ArchiveReaderAbstract): @@ -32,8 +30,7 @@ class ArchiveReaderSqlZip(ArchiveReaderAbstract): def __init__(self, path: Union[str, Path], **kwargs: Any): super().__init__(path, **kwargs) self._in_context = False - # we lazily create the temp dir / session when needed, then clean up on exit - self._temp_dir: Optional[Path] = None + # we lazily create the storage backend, then clean up on exit self._backend: Optional[db.ArchiveReadOnlyBackend] = None def __enter__(self) -> 'ArchiveReaderSqlZip': @@ -41,14 +38,11 @@ def __enter__(self) -> 'ArchiveReaderSqlZip': return self def __exit__(self, *args, **kwargs) -> None: - """Finalise the archive.""" + """Close the archive backend.""" super().__exit__(*args, **kwargs) if self._backend: self._backend.close() self._backend = None - if self._temp_dir: - shutil.rmtree(self._temp_dir, ignore_errors=False) - self._temp_dir = None self._in_context = False def get_metadata(self) -> Dict[str, Any]: @@ -62,19 +56,21 @@ def get_backend(self) -> db.ArchiveReadOnlyBackend: raise AssertionError('Not in context') if self._backend is not None: return self._backend - if not self._temp_dir: - # create the work folder - self._temp_dir = Path(tempfile.mkdtemp()) - db_file = self._temp_dir / DB_FILENAME - if not db_file.exists(): - # extract the database to the work folder - with db_file.open('wb') as handle: - try: - extract_file_in_zip(self.path, DB_FILENAME, handle, search_limit=4) - except Exception as exc: - raise CorruptArchive(f'database could not be read: {exc}') from exc - engine = create_sqla_engine(db_file) - self._backend = db.ArchiveReadOnlyBackend(self.path, orm.Session(engine)) + profile = Profile( + 'default', { + 'storage': { + 'backend': 'archive.sqlite', + 'config': { + 'path': str(self.path) + } + }, + 'process_control': { + 'backend': 'null', + 'config': {} + } + } + ) + self._backend = db.ArchiveReadOnlyBackend(profile) return self._backend diff --git a/aiida/tools/archive/imports.py b/aiida/tools/archive/imports.py index 052c3e9722..62eac9ac30 100644 --- a/aiida/tools/archive/imports.py +++ b/aiida/tools/archive/imports.py @@ -20,7 +20,7 @@ from aiida.common.links import LinkType from aiida.common.log import AIIDA_LOGGER from aiida.common.progress_reporter 
import get_progress_reporter -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm.entities import EntityTypes from aiida.orm.implementation import Backend from aiida.orm.querybuilder import QueryBuilder @@ -120,7 +120,7 @@ def import_archive( raise ValueError(f"merge_comments not in {('leave', 'newest', 'overwrite')!r}") type_check(group, orm.Group, allow_none=True) type_check(test_run, bool) - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() type_check(backend, Backend) if group and not group.is_stored: diff --git a/aiida/tools/graph/deletions.py b/aiida/tools/graph/deletions.py index 61e0454f1d..48011a2550 100644 --- a/aiida/tools/graph/deletions.py +++ b/aiida/tools/graph/deletions.py @@ -12,7 +12,7 @@ from typing import Callable, Iterable, Set, Tuple, Union from aiida.common.log import AIIDA_LOGGER -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import Group, Node, QueryBuilder from aiida.tools.graph.graph_traversers import get_nodes_delete @@ -63,7 +63,7 @@ def delete_nodes( :returns: (pks to delete, whether they were deleted) """ - backend = backend or get_manager().get_backend() + backend = backend or get_manager().get_profile_storage() # pylint: disable=too-many-arguments,too-many-branches,too-many-locals,too-many-statements diff --git a/aiida/tools/visualization/graph.py b/aiida/tools/visualization/graph.py index b11ce15ff7..5803f53f69 100644 --- a/aiida/tools/visualization/graph.py +++ b/aiida/tools/visualization/graph.py @@ -18,7 +18,7 @@ from aiida import orm from aiida.common import LinkType -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm.utils.links import LinkPair from aiida.tools.graph.graph_traversers import traverse_graph @@ -403,7 +403,7 @@ def __init__( self._node_styles = node_style_fn or default_node_styles self._node_sublabels = node_sublabel_fn or default_node_sublabels self._node_id_type = node_id_type - self._backend = backend or get_manager().get_backend() + self._backend = backend or get_manager().get_profile_storage() self._ignore_node_style = _OVERRIDE_STYLES_DICT['ignore_node'] self._origin_node_style = _OVERRIDE_STYLES_DICT['origin_node'] diff --git a/aiida/transports/cli.py b/aiida/transports/cli.py index 3da97eae7b..1a70164b0c 100644 --- a/aiida/transports/cli.py +++ b/aiida/transports/cli.py @@ -18,7 +18,7 @@ from aiida.cmdline.utils import echo from aiida.cmdline.utils.decorators import with_dbenv from aiida.common.exceptions import NotExistent -from aiida.manage.manager import get_manager +from aiida.manage import get_manager TRANSPORT_PARAMS = [] diff --git a/aiida/transports/transport.py b/aiida/transports/transport.py index 71d79e7420..67e5821e98 100644 --- a/aiida/transports/transport.py +++ b/aiida/transports/transport.py @@ -166,12 +166,12 @@ def set_logger_extra(self, logger_extra): """ Pass the data that should be passed automatically to self.logger as 'extra' keyword. This is typically useful if you pass data - obtained using get_dblogger_extra in aiida.backends.djsite.utils, to automatically + obtained using get_dblogger_extra in aiida.orm.utils.log, to automatically log also to the DbLog table. :param logger_extra: data that you want to pass as extra to the self.logger. To write to DbLog, it should be created by the - aiida.backends.djsite.utils.get_dblogger_extra function. 
Pass None if you
+        aiida.orm.utils.log.get_dblogger_extra function. Pass None if you
         do not want to have extras passed.
         """
         self._logger_extra = logger_extra
diff --git a/docs/source/conf.py b/docs/source/conf.py
index ff7129744e..971c0c65a2 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -24,8 +24,7 @@
 import aiida
 from aiida.manage.configuration import load_documentation_profile

-# Load the dummy profile even if we are running locally, this way the documentation will succeed even if the current
-# default profile of the AiiDA installation does not use a Django backend.
+# Load the dummy profile for sphinx autodoc to use when loading modules
 load_documentation_profile()

 # If extensions (or modules to document with autodoc) are in another directory,
@@ -35,9 +34,6 @@

 # -- General configuration -----------------------------------------------------

-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = '1.5.0'
-
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
@@ -277,13 +273,6 @@
 # If false, no module index is generated.
 #latex_domain_indices = True

-# this is to avoid the error
-# aiida/orm/implementation/django/querybuilder.py:docstring of
-# aiida.orm.implementation.django.querybuilder.array_length._compiler_dispatch:1: WARNING: Unknown target name: "visit".
-autodoc_default_options = {
-    'exclude-members': '_compiler_dispatch'
-}
-

 def run_apidoc(_):
     """Runs sphinx-apidoc when building the documentation.
@@ -296,6 +285,7 @@ def run_apidoc(_):
     source_dir = os.path.abspath(os.path.dirname(__file__))
     apidoc_dir = os.path.join(source_dir, 'reference', 'apidoc')
     package_dir = os.path.join(source_dir, os.pardir, os.pardir, 'aiida')
+    exclude_api_patterns = []

     # In #1139, they suggest the route below, but for me this ended up
     # calling sphinx-build, not sphinx-apidoc
@@ -309,7 +299,9 @@ def run_apidoc(_):
     cmd_path = os.path.abspath(os.path.join(sys.prefix, 'bin', 'sphinx-apidoc'))

     options = [
-        '-o', apidoc_dir, package_dir,
+        package_dir,
+        *exclude_api_patterns,
+        '-o', apidoc_dir,
         '--private',
         '--force',
         '--no-headings',
diff --git a/docs/source/howto/installation.rst b/docs/source/howto/installation.rst
index cf50589b18..43af5365e0 100644
--- a/docs/source/howto/installation.rst
+++ b/docs/source/howto/installation.rst
@@ -40,20 +40,28 @@
 To display these parameters, use ``verdi profile show``:

 ..
code:: bash - Info: Profile: project-one - ---------------------- ------------------------------------------------ - aiidadb_backend django - aiidadb_engine postgresql_psycopg2 - aiidadb_host localhost - aiidadb_name aiida_project_one - aiidadb_pass correcthorsebatterystaple - aiidadb_port 5432 - aiidadb_repository_uri file:///home/user/.virtualenvs/aiida/repository/ - aiidadb_user aiida - default_user_email user@email.com - options {'daemon_default_workers': 3} - profile_uuid 4c272a87d7f543b08da9fe738d88bb13 - ---------------------- ------------------------------------------------ + Report: Profile: a-import-sqla + PROFILE_UUID: fede89dae42b4df3bf46ab27e2b500ca + default_user_email: user@email.com + process_control: + backend: rabbitmq + config: + broker_host: 127.0.0.1 + broker_password: guest + broker_port: 5672 + broker_protocol: amqp + broker_username: guest + broker_virtual_host: '' + storage: + backend: psql_dos + config: + database_engine: postgresql_psycopg2 + database_hostname: localhost + database_name: name + database_password: abc + database_port: 5432 + database_username: username + repository_uri: file:///path/to/repository By default, the parameters of the default profile are shown, but one can pass the profile name of another, e.g., ``verdi profile show project-two`` to change that. diff --git a/docs/source/intro/install_conda.rst b/docs/source/intro/install_conda.rst index 521d81aa6f..22e5c6ffc6 100644 --- a/docs/source/intro/install_conda.rst +++ b/docs/source/intro/install_conda.rst @@ -116,13 +116,13 @@ If you want to install AiiDA onto you own personal workstation/laptop, it is rec .. code-block:: console - (aiida) $ verdi status - ✓ config dir: /home/ubuntu/.aiida - ✓ profile: On profile me - ✓ repository: /home/ubuntu/.aiida/repository/me - ✓ postgres: Connected as aiida_qs_ubuntu_c6a4f69d255fbe9cdb7385dcdcf3c050@localhost:5432 - ✓ rabbitmq: Connected as amqp://127.0.0.1?heartbeat=600 - ✓ daemon: Daemon is running as PID 16430 since 2020-04-29 12:17:31 + (aiida) $ verdi status + ✓ version: AiiDA v2.0.0 + ✓ config: /path/to/.aiida + ✓ profile: default + ✓ storage: Storage for 'default' @ postgresql://username:***@localhost:5432/db_name / file:///path/to/repository + ✓ rabbitmq: Connected as amqp://127.0.0.1?heartbeat=600 + ✓ daemon: Daemon is running as PID 2809 since 2019-03-15 16:27:52 At this point you now have a working AiiDA environment, from which you can add and retrieve data. diff --git a/docs/source/intro/install_system.rst b/docs/source/intro/install_system.rst index 9e4931783b..0f80ef3d6d 100644 --- a/docs/source/intro/install_system.rst +++ b/docs/source/intro/install_system.rst @@ -277,12 +277,12 @@ This is the *recommended* installation method to setup AiiDA on a personal lapto .. code-block:: console (aiida) $ verdi status - ✓ config dir: /home/ubuntu/.aiida - ✓ profile: On profile me - ✓ repository: /home/ubuntu/.aiida/repository/me - ✓ postgres: Connected as aiida_qs_ubuntu_c6a4f69d255fbe9cdb7385dcdcf3c050@localhost:5432 + ✓ version: AiiDA v2.0.0 + ✓ config: /path/to/.aiida + ✓ profile: default + ✓ storage: Storage for 'default' @ postgresql://username:***@localhost:5432/db_name / file:///path/to/repository ✓ rabbitmq: Connected as amqp://127.0.0.1?heartbeat=600 - ✓ daemon: Daemon is running as PID 16430 since 2020-04-29 12:17:31 + ✓ daemon: Daemon is running as PID 2809 since 2019-03-15 16:27:52 At this point you should now have a working AiiDA environment, from which you can add and retrieve data. 
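A recurring change throughout the Python hunks in this patch is the replacement of ``get_manager().get_backend()`` with ``get_manager().get_profile_storage()``. A minimal sketch of the new accessor, assuming an already-configured profile; the profile name ``my_profile`` and the printed output are placeholders, not taken from this patch:

.. code-block:: python

    from aiida import load_profile
    from aiida.manage import get_manager

    load_profile('my_profile')  # load a configured profile; the name is a placeholder

    # The profile storage backend replaces the old ``get_backend()`` accessor;
    # it is the object now passed around as ``backend=`` in the ORM and archive tools.
    storage = get_manager().get_profile_storage()
    print(storage)  # e.g. "Storage for 'my_profile' @ postgresql://..."
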
diff --git a/docs/source/intro/troubleshooting.rst b/docs/source/intro/troubleshooting.rst index d4006c241f..e7dd103fe3 100644 --- a/docs/source/intro/troubleshooting.rst +++ b/docs/source/intro/troubleshooting.rst @@ -10,9 +10,10 @@ If you experience any problems, first check that all services are up and running $ verdi status - ✓ profile: On profile django - ✓ repository: /repo/aiida_dev/django - ✓ postgres: Connected as aiida@localhost:5432 + ✓ version: AiiDA v2.0.0 + ✓ config: /path/to/.aiida + ✓ profile: default + ✓ storage: Storage for 'default' @ postgresql://username:***@localhost:5432/db_name / file:///path/to/repository ✓ rabbitmq: Connected as amqp://127.0.0.1?heartbeat=600 ✓ daemon: Daemon is running as PID 2809 since 2019-03-15 16:27:52 diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index 79fd9ab914..9e82f6060d 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -51,6 +51,7 @@ py:class EntityTypes py:class IO py:class QueryBuilder py:class SelfType +py:class Profile ### AiiDA @@ -122,6 +123,10 @@ py:class User py:class WorkChain py:class WorkChainSpec py:func QueryBuilder._get_ormclass +py:class orm.implementation.Backend +py:class aiida.common.exceptions.UnreachableStorage +py:class aiida.common.exceptions.IncompatibleDatabaseSchema +py:class aiida.common.exceptions.DatabaseMigrationError py:class AuthInfoCollection py:class CommentCollection @@ -178,18 +183,6 @@ py:class html.parser.HTMLParser py:class disk_objectstore.container.Container -py:class django.contrib.auth.base_user.AbstractBaseUser -py:class django.contrib.auth.base_user.BaseUserManager -py:class django.contrib.auth.models.AbstractBaseUser -py:class django.contrib.auth.models.BaseUserManager -py:class django.contrib.auth.models.PermissionsMixin -py:class django.core.exceptions.MultipleObjectsReturned -py:class django.core.exceptions.ObjectDoesNotExist -py:class django.db.models.base.Model -py:class django.db.models.manager.Manager -py:class django.db.models.query.QuerySet -py:class django.db.migrations.migration.Migration - py:class flask.app.Flask py:class sqlalchemy.orm.decl_api.SqliteModel @@ -209,17 +202,8 @@ py:class uuid.UUID py:class psycopg2.extensions.cursor -py:class aldjemy.orm.DbNode -py:class aldjemy.orm.DbLink -py:class aldjemy.orm.DbComputer -py:class aldjemy.orm.DbUser -py:class aldjemy.orm.DbGroup -py:class aldjemy.orm.DbAuthInfo -py:class aldjemy.orm.DbComment -py:class aldjemy.orm.DbLog -py:class aldjemy.orm.DbSetting - py:class alembic.config.Config +py:class alembic.runtime.migration.MigrationContext py:class pgsu.PGSU py:meth pgsu.PGSU.__init__ diff --git a/docs/source/reference/command_line.rst b/docs/source/reference/command_line.rst index d8f4b574d3..1a42cef636 100644 --- a/docs/source/reference/command_line.rst +++ b/docs/source/reference/command_line.rst @@ -213,7 +213,7 @@ Below is a list with all available subcommands. Commands: check-load-time Check for common indicators that slowdown `verdi`. check-undesired-imports Check that verdi does not import python modules it shouldn't. - run-sql Run a raw SQL command on the database. + run-sql Run a raw SQL command on the profile database (only... run_daemon Run a daemon instance in the current interpreter. validate-plugins Validate all plugins by checking they can be loaded. @@ -376,8 +376,7 @@ Below is a list with all available subcommands. --institution NONEMPTYSTRING Institution of the user. 
[required] --db-engine [postgresql_psycopg2] Engine to use to connect to the database. - --db-backend [django|sqlalchemy] - Database backend to use. + --db-backend [psql_dos] Database backend to use. --db-host HOSTNAME Database server host. Leave empty for "peer" authentication. --db-port INTEGER Database server port. @@ -464,6 +463,9 @@ Below is a list with all available subcommands. Setup a new profile. + This method assumes that an empty PSQL database has been created and that the database + user has been created. + Options: -n, --non-interactive In non-interactive mode, the CLI never prompts but simply uses default values for options that define one. @@ -476,8 +478,7 @@ Below is a list with all available subcommands. --institution NONEMPTYSTRING Institution of the user. [required] --db-engine [postgresql_psycopg2] Engine to use to connect to the database. - --db-backend [django|sqlalchemy] - Database backend to use. + --db-backend [psql_dos] Database backend to use. --db-host HOSTNAME Database server host. Leave empty for "peer" authentication. --db-port INTEGER Database server port. @@ -557,6 +558,7 @@ Below is a list with all available subcommands. integrity Checks for the integrity of the data storage. maintain Performs maintenance tasks on the repository. migrate Migrate the storage to the latest schema version. + version Print the current version of the storage schema. .. _reference:command-line:verdi-user: diff --git a/environment.yml b/environment.yml index 78dc4288dc..e955049449 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,6 @@ channels: - defaults dependencies: - python~=3.8 -- aldjemy~=2.4 - alembic~=1.2 - archive-path~=0.3.6 - aio-pika~=6.6 @@ -15,7 +14,6 @@ dependencies: - click-spinner~=0.1.8 - click>=8.0.3,~=8.0 - disk-objectstore~=0.6.0 -- django~=2.2 - python-graphviz~=0.13 - ipython~=7.20 - jinja2~=3.0 diff --git a/open_source_licenses.txt b/open_source_licenses.txt index 4fbf6fbc01..c2f87f2fa9 100644 --- a/open_source_licenses.txt +++ b/open_source_licenses.txt @@ -2,7 +2,6 @@ Some files in AiiDA include snippets of code taken from other open-source projects: Django: - * aiida/backends/djsite/settings/settings.py * aiida/common/hashing.py * aiida/utils/timezone.py diff --git a/pyproject.toml b/pyproject.toml index 71f7de0a1d..bad767ddbc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ keywords = ["aiida", "workflows"] requires-python = ">=3.8" dependencies = [ - "aldjemy~=2.4", "alembic~=1.2", "archive-path~=0.3.6", "aio-pika~=6.6", @@ -33,7 +32,6 @@ dependencies = [ "click-spinner~=0.1.8", "click~=8.0,>=8.0.3", "disk-objectstore~=0.6.0", - "django~=2.2", "graphviz~=0.13", "ipython~=7.20", "jinja2~=3.0", @@ -110,7 +108,6 @@ pre-commit = [ "pre-commit~=2.2", "pylint~=2.11.1", "pylint-aiida~=0.1.1", - "pylint-django", "sqlalchemy[mypy]~=1.4.29", "tomli", "types-PyYAML", @@ -128,7 +125,6 @@ tests = [ "pytest-regressions~=2.2", "pympler~=0.9", "coverage<5.0", - "sqlalchemy-diff~=0.1.3", "sqlalchemy-utils~=0.37.2", "sphinx~=3.2.1" ] @@ -258,10 +254,7 @@ exclude = [ ] [tool.pylint.master] -load-plugins = ["pylint_aiida", "pylint_django"] -# this currently fails with aiida.common.exceptions.ProfileConfigurationError: no profile has been loaded -# we woud need a static settings module to use this -# django-settings-module = "aiida.backends.djsite.settings" +load-plugins = ["pylint_aiida"] [tool.pylint.format] max-line-length = 120 @@ -272,7 +265,6 @@ disable = [ "bad-option-value", "consider-using-f-string", "cyclic-import", 
- "django-not-configured", "duplicate-code", "import-outside-toplevel", "inconsistent-return-statements", @@ -318,7 +310,6 @@ testpaths = [ ] filterwarnings = [ "ignore::DeprecationWarning:babel:", - "ignore::DeprecationWarning:django:", "ignore::DeprecationWarning:frozendict:", "ignore::DeprecationWarning:sqlalchemy:", "ignore::DeprecationWarning:yaml:", @@ -371,7 +362,6 @@ check_untyped_defs = false [[tool.mypy.overrides]] module = [ 'circus.*', - 'django.*', 'kiwipy.*', 'numpy.*', 'pytz.*', @@ -386,7 +376,7 @@ ignore_missing_imports = true [tool.tox] legacy_tox_ini = """ [tox] -envlist = py38-django +envlist = py38 [testenv] usedevelop=True @@ -395,18 +385,17 @@ deps = py39: -rrequirements/requirements-py-3.9.txt py310: -rrequirements/requirements-py-3.10.txt -[testenv:py{38,39,310}-{django,sqla}] +[testenv:py{38,39,310}] passenv = PYTHONASYNCIODEBUG setenv = - django: AIIDA_TEST_BACKEND = django - sqla: AIIDA_TEST_BACKEND = sqlalchemy SQLALCHEMY_WARN_20 = 1 commands = pytest {posargs} [testenv:py{38,39,310}-verdi] +passenv = + AIIDA_TEST_BACKEND setenv = - AIIDA_TEST_BACKEND = django AIIDA_PATH = {toxinidir}/.tox/.aiida commands = verdi {posargs} @@ -444,7 +433,7 @@ description = Run the pre-commit checks extras = pre-commit commands = pre-commit run {posargs} -[testenv:molecule-{django,sqla}] +[testenv:molecule] description = Run the molecule containerised tests skip_install = true parallel_show_output = true @@ -454,9 +443,8 @@ deps = molecule[docker]~=3.1.0 setenv = MOLECULE_GLOB = .molecule/*/config_local.yml - django: AIIDA_TEST_BACKEND = django - sqla: AIIDA_TEST_BACKEND = sqlalchemy passenv = + AIIDA_TEST_BACKEND AIIDA_TEST_WORKERS commands = molecule {posargs:test} """ diff --git a/requirements/requirements-py-3.10.txt b/requirements/requirements-py-3.10.txt index e05452d76d..1f8ed25291 100644 --- a/requirements/requirements-py-3.10.txt +++ b/requirements/requirements-py-3.10.txt @@ -2,7 +2,6 @@ aiida-export-migration-tests==0.9.0 aio-pika==6.8.0 aiormq==3.3.1 alabaster==0.7.12 -aldjemy==2.6 alembic==1.7.5 aniso8601==9.0.1 archive-path==0.3.6 @@ -32,7 +31,6 @@ decorator==5.1.0 defusedxml==0.7.1 deprecation==2.1.0 disk-objectstore==0.6.0 -Django==2.2.26 docutils==0.15.2 entrypoints==0.3 Flask==1.1.2 @@ -153,9 +151,7 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinxext-rediraffe==0.2.7 SQLAlchemy==1.4.27 -sqlalchemy-diff==0.1.5 SQLAlchemy-Utils==0.37.9 -sqlparse==0.4.2 sympy==1.9 tabulate==0.8.9 tenacity==8.0.1 diff --git a/requirements/requirements-py-3.8.txt b/requirements/requirements-py-3.8.txt index e900b5a51a..f5c1f21684 100644 --- a/requirements/requirements-py-3.8.txt +++ b/requirements/requirements-py-3.8.txt @@ -2,7 +2,6 @@ aiida-export-migration-tests==0.9.0 aio-pika==6.8.0 aiormq==3.3.1 alabaster==0.7.12 -aldjemy==2.6 alembic==1.7.5 aniso8601==9.0.1 archive-path==0.3.6 @@ -32,7 +31,6 @@ decorator==5.1.0 defusedxml==0.7.1 deprecation==2.1.0 disk-objectstore==0.6.0 -Django==2.2.26 docutils==0.15.2 entrypoints==0.3 Flask==1.1.2 @@ -155,9 +153,7 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinxext-rediraffe==0.2.7 SQLAlchemy==1.4.27 -sqlalchemy-diff==0.1.5 SQLAlchemy-Utils==0.37.9 -sqlparse==0.4.2 sympy==1.9 tabulate==0.8.9 tenacity==8.0.1 diff --git a/requirements/requirements-py-3.9.txt b/requirements/requirements-py-3.9.txt index 29af607d3d..c86749a743 100644 --- a/requirements/requirements-py-3.9.txt +++ b/requirements/requirements-py-3.9.txt @@ -2,7 +2,6 @@ aiida-export-migration-tests==0.9.0 aio-pika==6.8.0 
aiormq==3.3.1 alabaster==0.7.12 -aldjemy==2.6 alembic==1.7.5 aniso8601==9.0.1 archive-path==0.3.6 @@ -32,7 +31,6 @@ decorator==5.1.0 defusedxml==0.7.1 deprecation==2.1.0 disk-objectstore==0.6.0 -Django==2.2.26 docutils==0.15.2 entrypoints==0.3 Flask==1.1.2 @@ -154,9 +152,7 @@ sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinxext-rediraffe==0.2.7 SQLAlchemy==1.4.27 -sqlalchemy-diff==0.1.5 SQLAlchemy-Utils==0.37.9 -sqlparse==0.4.2 sympy==1.9 tabulate==0.8.9 tenacity==8.0.1 diff --git a/tests/backends/aiida_django/__init__.py b/tests/backends/aiida_django/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/tests/backends/aiida_django/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/tests/backends/aiida_django/conftest.py b/tests/backends/aiida_django/conftest.py deleted file mode 100644 index d2c49e4b8f..0000000000 --- a/tests/backends/aiida_django/conftest.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Configuration file for pytest tests.""" - -from aiida.backends import BACKEND_DJANGO -from aiida.manage.tests import get_test_backend_name - -if get_test_backend_name() != BACKEND_DJANGO: - collect_ignore_glob = ['*'] # pylint: disable=invalid-name diff --git a/tests/backends/aiida_django/migrations/__init__.py b/tests/backends/aiida_django/migrations/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/tests/backends/aiida_django/migrations/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py b/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py deleted file mode 100644 index 635ad901b2..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py +++ /dev/null @@ -1,642 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -""" -Tests for the migrations of the attributes, extras and settings from EAV to JSONB -Migration 0037_attributes_extras_settings_json -""" - -import copy - -from django.db import transaction - -from .test_migrations_common import TestMigrations - -# The following sample dictionary can be used for the conversion test of attributes and extras -SAMPLE_DICT = { - 'bool': True, - '001': 2, - '17': 'string', - 'integer': 12, - 'float': 26.2, - 'string': 'a string', - 'dict': { - '25': [True, False], - 'a': 'b', - 'sublist': [1, 2, 3], - 'subdict': { - 'c': 'd' - } - }, - 'list': [1, True, 'ggg', { - 'h': 'j' - }, [9, 8, 7]], -} - -# The following base classes contain just model declaration for DbAttributes -# and DbExtras and are needed for the methods found at the -# DbAttributeFunctionality and DbExtraFunctionality and used for the deserialization -# of attribute and extras dictionaries -db_attribute_base_model = None -db_extra_base_model = None - - -class TestAttributesExtrasToJSONMigrationSimple(TestMigrations): - """ - A "simple" test for the attributes and extra migration from EAV to JSONB. - It stores a sample dictionary using the EAV deserialization of AiiDA Django - for the attributes and extras. Then the test checks that they are corerctly - converted to JSONB. - """ - migrate_from = '0036_drop_computer_transport_params' - migrate_to = '0037_attributes_extras_settings_json' - - # In the following dictionary we store the generated nodes (ids, attributes and extras) - # The correct migration of these nodes will be checked at the test - nodes_to_verify = {} - - def setUpBeforeMigration(self): - global db_attribute_base_model, db_extra_base_model # pylint: disable=global-statement - - db_node_model = self.apps.get_model('db', 'DbNode') - db_computer_model = self.apps.get_model('db', 'DbComputer') - # The following base models are initialized here since the model at this point - # it has the corresponding EAV tables - db_attribute_base_model = self.apps.get_model('db', 'DbAttribute') - db_extra_base_model = self.apps.get_model('db', 'DbExtra') - - computer = db_computer_model( - name='localhost_migration', - hostname='localhost', - transport_type='core.local', - scheduler_type='core.pbspro', - metadata={'workdir': '/tmp/aiida'} - ) - computer.save() - - node = db_node_model(node_type='data.Data.', dbcomputer_id=computer.id, user_id=self.default_user.id) - node.save() - - for key, value in SAMPLE_DICT.items(): - DbAttributeFunctionality.set_value_for_node(node, key, value) - - for key, value in SAMPLE_DICT.items(): - DbExtraFunctionality.set_value_for_node(node, key, value) - - self.nodes_to_verify[node.id] = {} - self.nodes_to_verify[node.id]['attr'] = copy.deepcopy(SAMPLE_DICT) - self.nodes_to_verify[node.id]['extr'] = copy.deepcopy(SAMPLE_DICT) - - def test_attributes_extras_migration(self): - """Verify that the attributes and extras were migrated correctly""" - db_node_model = self.apps.get_model('db', 'DbNode') - for curr_dbnode in db_node_model.objects.all(): - self.assertEqual(curr_dbnode.attributes, self.nodes_to_verify[curr_dbnode.id]['attr']) - self.assertEqual(curr_dbnode.extras, self.nodes_to_verify[curr_dbnode.id]['extr']) - - 
-class TestAttributesExtrasToJSONMigrationManyNodes(TestMigrations): - """ - This test comparing to the previous one (TestAttributesExtrasToJSONMigrationSimple), it - creates several nodes with different atributes and extras and checks their correct - migration one-by-one. - """ - migrate_from = '0036_drop_computer_transport_params' - migrate_to = '0037_attributes_extras_settings_json' - - # In the following dictionary we store the generated nodes (ids, attributes and extras) - # The correct migration of these nodes will be checked at the test - nodes_to_verify = {} - - # Number of nodes to create - nodes_no_to_create = 20 - - def setUpBeforeMigration(self): - global db_attribute_base_model, db_extra_base_model # pylint: disable=global-statement - - db_node_model = self.apps.get_model('db', 'DbNode') - db_computer_model = self.apps.get_model('db', 'DbComputer') - # The following base models are initialized here since the model at this point - # it has the corresponding EAV tables - db_attribute_base_model = self.apps.get_model('db', 'DbAttribute') - db_extra_base_model = self.apps.get_model('db', 'DbExtra') - - computer = db_computer_model( - name='localhost_migration', - hostname='localhost', - transport_type='core.local', - scheduler_type='core.pbspro', - metadata={'workdir': '/tmp/aiida'} - ) - computer.save() - - with transaction.atomic(): - for _ in range(self.nodes_no_to_create): - node = db_node_model(node_type='data.Data.', dbcomputer_id=computer.id, user_id=self.default_user.id) - node.save() - - attr_copy = copy.deepcopy(SAMPLE_DICT) - attr_copy['type_of_json'] = 'attr' - attr_copy['node_id'] = node.id - - # Setting the attributes as it used to be set (with the same methods) - for key in attr_copy.keys(): - DbAttributeFunctionality.set_value_for_node(node, key, attr_copy[key]) - - extr_copy = copy.deepcopy(SAMPLE_DICT) - extr_copy['type_of_json'] = 'extr' - extr_copy['node_id'] = node.id - - # Setting the extras as it used to be set (with the same methods) - for key in extr_copy.keys(): - DbExtraFunctionality.set_value_for_node(node, key, extr_copy[key]) - - self.nodes_to_verify[node.id] = {} - self.nodes_to_verify[node.id]['attr'] = attr_copy - self.nodes_to_verify[node.id]['extr'] = extr_copy - - def test_attributes_extras_migration_many(self): - """Verify that the attributes and extras were migrated correctly""" - db_node_model = self.apps.get_model('db', 'DbNode') - for curr_dbnode in db_node_model.objects.all(): - self.assertEqual(curr_dbnode.attributes, self.nodes_to_verify[curr_dbnode.id]['attr']) - self.assertEqual(curr_dbnode.extras, self.nodes_to_verify[curr_dbnode.id]['extr']) - - -class TestSettingsToJSONMigration(TestMigrations): - """ - This test checks the correct migration of the settings. Setting records were used as an - example from a typical settings table of Django EAV. 
- """ - migrate_from = '0036_drop_computer_transport_params' - migrate_to = '0037_attributes_extras_settings_json' - - # The settings to create and verify - settings_info = {} - - def setUpBeforeMigration(self): - from aiida.common import timezone - - db_setting_model = self.apps.get_model('db', 'DbSetting') - - self.settings_info['2daemon|task_stop|updater2'] = dict( - key='2daemon|task_stop|updater2', - datatype='date', - dval=timezone.datetime_to_isoformat(timezone.now()), - description='The last time the daemon finished to run ' - 'the task \'updater\' (updater)' - ) - self.settings_info['2daemon|task_start|updater2'] = dict( - key='2daemon|task_start|updater2', - datatype='date', - dval=timezone.datetime_to_isoformat(timezone.now()), - description='The last time the daemon started to run ' - 'the task \'updater\' (updater)' - ) - self.settings_info['2db|backend2'] = dict( - key='2db|backend2', - datatype='txt', - tval='django', - description='The backend used to communicate with the database.' - ) - self.settings_info['2daemon|user2'] = dict( - key='2daemon|user2', - datatype='txt', - tval='aiida@theossrv5.epfl.ch', - description='The only user that is allowed to run the AiiDA daemon on ' - 'this DB instance' - ) - self.settings_info['2db|schemaversion2'] = dict( - key='2db|schemaversion2', - datatype='txt', - tval=' 1.0.8', - description='The version of the schema used in this database.' - ) - - with transaction.atomic(): - for setting_info in self.settings_info.values(): - setting = db_setting_model(**setting_info) - setting.save() - - def test_settings_migration(self): - """Verify that the settings were migrated correctly""" - db_setting_model = self.apps.get_model('db', 'DbSetting') - for curr_setting in db_setting_model.objects.filter(key__in=self.settings_info.keys()).all(): - curr_setting_info = self.settings_info[curr_setting.key] - self.assertEqual(curr_setting.description, curr_setting_info['description']) - if curr_setting_info['datatype'] == 'txt': - self.assertEqual(curr_setting.val, curr_setting_info['tval']) - elif curr_setting_info['datatype'] == 'date': - self.assertEqual(curr_setting.val, curr_setting_info['dval']) - - def tearDown(self): - """ - Deletion of settings - this is needed because settings are not deleted by the - typical test cleanup methods. - """ - db_setting_model = self.apps.get_model('db', 'DbSetting') - db_setting_model.objects.filter(key__in=self.settings_info.keys()).delete() - super().tearDown() - - -# pylint: disable=no-init,dangerous-default-value,too-many-statements,no-else-return,too-many-arguments,too-many-branches,fixme -class DbMultipleValueAttributeBaseClass: - """ - Abstract base class for tables storing attribute + value data, of - different data types (without any association to a Node). - """ - # separator for subfields - _sep = '.' # The AIIDA_ATTRIBUTE_SEP - - class Meta: - abstract = True - unique_together = (('key',),) - - # There are no subspecifiers. If instead you want to group attributes - # (e.g. 
by node, as it is done in the DbAttributeBaseClass), specify here - # the field name - _subspecifier_field_name = None - - @property - def subspecifier_pk(self): - """ - Return the subspecifier PK in the database (or None, if no - subspecifier should be used) - """ - if self._subspecifier_field_name is None: - return None - else: - return getattr(self, self._subspecifier_field_name).pk - - @classmethod - def validate_key(cls, key): - """ - Validate the key string to check if it is valid (e.g., if it does not - contain the separator symbol.). - - :return: None if the key is valid - :raise aiida.common.ValidationError: if the key is not valid - """ - from aiida.backends.utils import AIIDA_ATTRIBUTE_SEP - from aiida.common.exceptions import ValidationError - - if not isinstance(key, str): - raise ValidationError('The key must be a string.') - if not key: - raise ValidationError('The key cannot be an empty string.') - if AIIDA_ATTRIBUTE_SEP in key: - raise ValidationError( - f"The separator symbol '{AIIDA_ATTRIBUTE_SEP}' cannot be present in the key of attributes, extras, etc." - ) - - @classmethod - def set_value( - cls, key, value, with_transaction=True, subspecifier_value=None, other_attribs={}, stop_if_existing=False - ): - """ - Set a new value in the DB, possibly associated to the given subspecifier. - - :note: This method also stored directly in the DB. - - :param key: a string with the key to create (must be a level-0 - attribute, that is it cannot contain the separator cls._sep). - :param value: the value to store (a basic data type or a list or a dict) - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - :param with_transaction: True if you want this function to be managed - with transactions. Set to False if you already have a manual - management of transactions in the block where you are calling this - function (useful for speed improvements to avoid recursive - transactions) - :param other_attribs: a dictionary of other parameters, to store - only on the level-zero attribute (e.g. for description in DbSetting). - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the new entry would violate an - uniqueness constraint in the DB (same key, or same key+node, - depending on the specific subclass). Otherwise, it will - first delete the old value, if existent. The use with True is - useful if you want to use a given attribute as a "locking" value, - e.g. to avoid to perform an action twice on the same node. - Note that, if you are using transactions, you may get the error - only when the transaction is committed. - """ - cls.validate_key(key) - - try: - if with_transaction: - sid = transaction.savepoint() - - # create_value returns a list of nodes to store - to_store = cls.create_value(key, value, subspecifier_value=subspecifier_value, other_attribs=other_attribs) - - if to_store: - # if not stop_if_existing: - # # Delete the olf values if stop_if_existing is False, - # # otherwise don't delete them and hope they don't - # # exist. If they exist, I'll get an UniquenessError - # - # ## NOTE! 
Be careful in case the extra/attribute to - # ## store is not a simple attribute but a list or dict: - # ## like this, it should be ok because if we are - # ## overwriting an entry it will stop anyway to avoid - # ## to overwrite the main entry, but otherwise - # ## there is the risk that trailing pieces remain - # ## so in general it is good to recursively clean - # ## all sub-items. - # cls.del_value(key, - # subspecifier_value=subspecifier_value) - for my_obj in to_store: - my_obj.save() - - # cls.objects.bulk_create(to_store) - - if with_transaction: - transaction.savepoint_commit(sid) - except BaseException as exc: # All exceptions including CTRL+C, ... - from django.db.utils import IntegrityError - - from aiida.common.exceptions import UniquenessError - - if with_transaction: - transaction.savepoint_rollback(sid) - if isinstance(exc, IntegrityError) and stop_if_existing: - raise UniquenessError( - 'Impossible to create the required ' - 'entry ' - "in table '{}', " - 'another entry already exists and the creation would ' - 'violate an uniqueness constraint.\nFurther details: ' - '{}'.format(cls.__name__, exc) - ) - raise - - @classmethod - def create_value(cls, key, value, subspecifier_value=None, other_attribs={}): - """ - Create a new list of attributes, without storing them, associated - with the current key/value pair (and to the given subspecifier, - e.g. the DbNode for DbAttributes and DbExtras). - - :note: No hits are done on the DB, in particular no check is done - on the existence of the given nodes. - - :param key: a string with the key to create (can contain the - separator cls._sep if this is a sub-attribute: indeed, this - function calls itself recursively) - :param value: the value to store (a basic data type or a list or a dict) - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - :param other_attribs: a dictionary of other parameters, to store - only on the level-zero attribute (e.g. for description in DbSetting). - - :return: always a list of class instances; it is the user - responsibility to store such entries (typically with a Django - bulk_create() call). 
- """ - import datetime - - from aiida.common import json - from aiida.common.timezone import get_current_timezone, is_naive, make_aware - - if cls._subspecifier_field_name is None: - if subspecifier_value is not None: - raise ValueError( - f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers' - ) - if issubclass(cls, DbAttributeFunctionality): - new_entry = db_attribute_base_model(key=key, **other_attribs) - else: - new_entry = db_extra_base_model(key=key, **other_attribs) - else: - if subspecifier_value is None: - raise ValueError( - 'You also have to specify a subspecifier value ' - 'for class {} (the {})'.format(cls.__name__, cls._subspecifier_field_name) - ) - further_params = other_attribs.copy() - further_params.update({cls._subspecifier_field_name: subspecifier_value}) - # new_entry = cls(key=key, **further_params) - if issubclass(cls, DbAttributeFunctionality): - new_entry = db_attribute_base_model(key=key, **further_params) - else: - new_entry = db_extra_base_model(key=key, **further_params) - - list_to_return = [new_entry] - - if value is None: - new_entry.datatype = 'none' - new_entry.bval = None - new_entry.tval = '' - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, bool): - new_entry.datatype = 'bool' - new_entry.bval = value - new_entry.tval = '' - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, int): - new_entry.datatype = 'int' - new_entry.ival = value - new_entry.tval = '' - new_entry.bval = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, float): - new_entry.datatype = 'float' - new_entry.fval = value - new_entry.tval = '' - new_entry.ival = None - new_entry.bval = None - new_entry.dval = None - - elif isinstance(value, str): - new_entry.datatype = 'txt' - new_entry.tval = value - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, datetime.datetime): - - # current timezone is taken from the settings file of django - if is_naive(value): - value_to_set = make_aware(value, get_current_timezone()) - else: - value_to_set = value - - new_entry.datatype = 'date' - # TODO: time-aware and time-naive datetime objects, see - # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#naive-and-aware-datetime-objects - new_entry.dval = value_to_set - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None - - elif isinstance(value, (list, tuple)): - - new_entry.datatype = 'list' - new_entry.dval = None - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = len(value) - new_entry.fval = None - - for i, subv in enumerate(value): - # I do not need get_or_create here, because - # above I deleted all children (and I - # expect no concurrency) - # NOTE: I do not pass other_attribs - list_to_return.extend( - cls.create_value(key=f'{key}{cls._sep}{i:d}', value=subv, subspecifier_value=subspecifier_value) - ) - - elif isinstance(value, dict): - - new_entry.datatype = 'dict' - new_entry.dval = None - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = len(value) - new_entry.fval = None - - for subk, subv in value.items(): - cls.validate_key(subk) - - # I do not need get_or_create here, because - # above I deleted all children (and I - # expect no concurrency) - # NOTE: I do not pass other_attribs - list_to_return.extend( - cls.create_value(key=f'{key}{cls._sep}{subk}', value=subv, 
subspecifier_value=subspecifier_value) - ) - else: - try: - jsondata = json.dumps(value) - except TypeError: - raise ValueError( - f'Unable to store the value: it must be either a basic datatype, or json-serializable: {value}' - ) - - new_entry.datatype = 'json' - new_entry.tval = jsondata - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None - - return list_to_return - - -class DbAttributeBaseClass(DbMultipleValueAttributeBaseClass): - """ - Abstract base class for tables storing element-attribute-value data. - Element is the dbnode; attribute is the key name. - Value is the specific value to store. - - This table had different SQL columns to store different types of data, and - a datatype field to know the actual datatype. - - Moreover, this class unpacks dictionaries and lists when possible, so that - it is possible to query inside recursive lists and dicts. - """ - - # In this way, the related name for the DbAttribute inherited class will be - # 'dbattributes' and for 'dbextra' will be 'dbextras' - # Moreover, automatically destroy attributes and extras if the parent - # node is deleted - # dbnode = m.ForeignKey('DbNode', related_name='%(class)ss', on_delete=m.CASCADE) - # max_length is required by MySql to have indexes and unique constraints - - _subspecifier_field_name = 'dbnode' - - @classmethod - def set_value_for_node(cls, dbnode, key, value, with_transaction=True, stop_if_existing=False): - """ - This is the raw-level method that accesses the DB. No checks are done - to prevent the user from (re)setting a valid key. - To be used only internally. - - :todo: there may be some error on concurrent write; - not checked in this unlucky case! - - :param dbnode: the dbnode for which the attribute should be stored; - if an integer is passed, it will raise, since this functionality is not - supported in the models for the migrations. - :param key: the key of the attribute to store; must be a level-zero - attribute (i.e., no separators in the key) - :param value: the value of the attribute to store - :param with_transaction: if True (default), do this within a transaction, - so that nothing gets stored if a subitem cannot be created. - Otherwise, if this parameter is False, no transaction management - is performed. - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the key already exists - for the given node. Otherwise, it will - first delete the old value, if existent. The use with True is - useful if you want to use a given attribute as a "locking" value, - e.g. to avoid to perform an action twice on the same node. - Note that, if you are using transactions, you may get the error - only when the transaction is committed. - - :raise ValueError: if the key contains the separator symbol used - internally to unpack dictionaries and lists (defined in cls._sep). 
- """ - if isinstance(dbnode, int): - raise ValueError('Integers (the dbnode pk) are not supported as input.') - else: - dbnode_node = dbnode - - cls.set_value( - key, - value, - with_transaction=with_transaction, - subspecifier_value=dbnode_node, - stop_if_existing=stop_if_existing - ) - - def __str__(self): - # pylint: disable=no-member - return '[{} ({})].{} ({})'.format( - self.dbnode.get_simple_name(invalid_result='Unknown node'), - self.dbnode.pk, - self.key, - self.datatype, - ) - - -class DbAttributeFunctionality(DbAttributeBaseClass): # pylint: disable=no-init - """ - This class defines all the methods that are needed for the correct - deserialization of given attribute dictionaries to the EAV table. - It is a stripped-down Django EAV schema to the absolutely necessary - methods for this deserialization. - """ - pass # pylint: disable=unnecessary-pass - - -class DbExtraFunctionality(DbAttributeBaseClass): # pylint: disable=no-init - """ - This class defines all the methods that are needed for the correct - deserialization of given extras dictionaries to the EAV table. - It is a stripped-down Django EAV schema to the absolutely necessary - methods for this deserialization. - """ - pass # pylint: disable=unnecessary-pass diff --git a/tests/backends/aiida_django/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py b/tests/backends/aiida_django/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py deleted file mode 100644 index 6b738053bd..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
#
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module,invalid-name
-"""
-Tests for the data migration of legacy `JobCalcState` attributes
-Migration 0038_data_migration_legacy_job_calculations
-"""
-from aiida.backends.general.migrations.calc_state import STATE_MAPPING
-
-from .test_migrations_common import TestMigrations
-
-
-class TestLegacyJobCalcStateDataMigration(TestMigrations):
-    """Test the migration that performs a data migration of legacy `JobCalcState`."""
-
-    migrate_from = '0037_attributes_extras_settings_json'
-    migrate_to = '0038_data_migration_legacy_job_calculations'
-
-    def setUpBeforeMigration(self):
-        self.nodes = {}
-
-        for state in STATE_MAPPING:
-            node = self.DbNode(
-                node_type='process.calculation.calcjob.CalcJobNode.',
-                user_id=self.default_user.id,
-                attributes={'state': state}
-            )
-            node.save()
-
-            self.nodes[state] = node.id
-
-    def test_data_migrated(self):
-        """Verify that the `process_state`, `process_status` and `exit_status` are set correctly."""
-        for state, pk in self.nodes.items():
-            node = self.load_node(pk)
-            self.assertEqual(node.attributes.get('process_state', None), STATE_MAPPING[state].process_state)
-            self.assertEqual(node.attributes.get('process_status', None), STATE_MAPPING[state].process_status)
-            self.assertEqual(node.attributes.get('exit_status', None), STATE_MAPPING[state].exit_status)
-            self.assertEqual(
-                node.attributes.get('process_label'), 'Legacy JobCalculation'
-            )  # All nodes should have this label
-            self.assertIsNone(node.attributes.get('state', None))  # The old state should have been removed
-
-            exit_status = node.attributes.get('exit_status', None)
-            if exit_status is not None:
-                self.assertIsInstance(exit_status, int)
diff --git a/tests/backends/aiida_django/migrations/test_migrations_0040_data_migration_legacy_process_attributes.py b/tests/backends/aiida_django/migrations/test_migrations_0040_data_migration_legacy_process_attributes.py
deleted file mode 100644
index afc9822f0e..0000000000
--- a/tests/backends/aiida_django/migrations/test_migrations_0040_data_migration_legacy_process_attributes.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
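[Editor's note] The test above feeds one node per key of `STATE_MAPPING` and then checks the migrated attributes. In essence, the migration rewrites each legacy `state` attribute into the modern process attributes. The sketch below shows the shape of that rewrite with an invented two-entry mapping; the real `STATE_MAPPING` lives in `aiida.backends.general.migrations.calc_state` and its keys and values differ:

from collections import namedtuple

# Invented stand-in; the real mapping covers more states with different values.
StateMapping = namedtuple('StateMapping', ['process_state', 'process_status', 'exit_status'])
STATE_MAPPING = {
    'FINISHED': StateMapping('finished', None, 0),
    'FAILED': StateMapping('failed', None, 2),
}

def migrate_legacy_state(attributes):
    """Swap the legacy ``state`` attribute for modern process attributes."""
    state = attributes.pop('state', None)
    mapped = STATE_MAPPING.get(state)
    if mapped is None:
        return attributes
    attributes['process_state'] = mapped.process_state
    if mapped.process_status is not None:
        attributes['process_status'] = mapped.process_status
    if mapped.exit_status is not None:
        attributes['exit_status'] = mapped.exit_status
    attributes['process_label'] = 'Legacy JobCalculation'
    return attributes

migrated = migrate_legacy_state({'state': 'FINISHED'})
assert migrated['process_state'] == 'finished' and 'state' not in migrated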
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Tests for the migrations of legacy process attributes.""" - -from .test_migrations_common import TestMigrations - - -class TestLegacyProcessAttributeDataMigration(TestMigrations): - """Test the migration that performs a data migration of legacy `JobCalcState`.""" - - migrate_from = '0039_reset_hash' - migrate_to = '0040_data_migration_legacy_process_attributes' - - def setUpBeforeMigration(self): - node_process = self.DbNode( - node_type='process.calculation.calcjob.CalcJobNode.', - user_id=self.default_user.id, - attributes={ - 'process_state': 'finished', - '_sealed': True, - '_finished': True, - '_failed': False, - '_aborted': False, - '_do_abort': False, - } - ) - node_process.save() - self.node_process_id = node_process.id - - # This is an "active" modern process, due to its `process_state` and should *not* receive the `sealed` attribute - node_process_active = self.DbNode( - node_type='process.calculation.calcjob.CalcJobNode.', - user_id=self.default_user.id, - attributes={ - 'process_state': 'created', - '_finished': True, - '_failed': False, - '_aborted': False, - '_do_abort': False, - } - ) - node_process_active.save() - self.node_process_active_id = node_process_active.id - - # Note that `Data` nodes should not have these attributes in real databases but the migration explicitly - # excludes data nodes, which is what this test is verifying, by checking they are not deleted - node_data = self.DbNode( - node_type='data.core.dict.Dict.', - user_id=self.default_user.id, - attributes={ - '_sealed': True, - '_finished': True, - '_failed': False, - '_aborted': False, - '_do_abort': False, - } - ) - node_data.save() - self.node_data_id = node_data.id - - def test_data_migrated(self): - """Verify that the correct attributes are removed.""" - deleted_keys = ['_sealed', '_finished', '_failed', '_aborted', '_do_abort'] - - node_process = self.load_node(self.node_process_id) - self.assertEqual(node_process.attributes['sealed'], True) - for key in deleted_keys: - self.assertNotIn(key, node_process.attributes) - - node_process_active = self.load_node(self.node_process_active_id) - self.assertNotIn('sealed', node_process_active.attributes) - for key in deleted_keys: - self.assertNotIn(key, node_process_active.attributes) - - node_data = self.load_node(self.node_data_id) - self.assertEqual(node_data.attributes.get('sealed', None), None) - for key in deleted_keys: - self.assertIn(key, node_data.attributes) diff --git a/tests/backends/aiida_django/migrations/test_migrations_0041_seal_unsealed_processes.py b/tests/backends/aiida_django/migrations/test_migrations_0041_seal_unsealed_processes.py deleted file mode 100644 index a2f70c1e19..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0041_seal_unsealed_processes.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
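[Editor's note] The migration exercised by the test above strips the legacy underscore-prefixed flags from process nodes and seals every process that is not in an active state, while leaving data nodes untouched. A compact sketch of that rule, with an assumed set of active states (only 'created' is grounded in the test itself):

ACTIVE_STATES = {'created', 'running', 'waiting'}  # assumed
LEGACY_KEYS = ('_sealed', '_finished', '_failed', '_aborted', '_do_abort')

def migrate_legacy_process_attributes(node_type, attributes):
    """Strip legacy flags from process nodes and seal the non-active ones."""
    if not node_type.startswith('process.'):
        return attributes  # data nodes keep their attributes, as the test asserts
    for key in LEGACY_KEYS:
        attributes.pop(key, None)
    if attributes.get('process_state') not in ACTIVE_STATES:
        attributes['sealed'] = True
    return attributes

finished = migrate_legacy_process_attributes(
    'process.calculation.calcjob.CalcJobNode.', {'process_state': 'finished', '_sealed': True}
)
assert finished == {'process_state': 'finished', 'sealed': True}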
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Tests for the migrations of legacy process attributes.""" - -from .test_migrations_common import TestMigrations - - -class TestSealUnsealedProcessesMigration(TestMigrations): - """Test the migration that performs a data migration of legacy `JobCalcState`.""" - - migrate_from = '0040_data_migration_legacy_process_attributes' - migrate_to = '0041_seal_unsealed_processes' - - def setUpBeforeMigration(self): - node_process = self.DbNode( - node_type='process.calculation.calcjob.CalcJobNode.', - user_id=self.default_user.id, - attributes={ - 'process_state': 'finished', - 'sealed': True, - } - ) - node_process.save() - self.node_process_id = node_process.id - - # This is an "active" modern process, due to its `process_state` and should *not* receive the `sealed` attribute - node_process_active = self.DbNode( - node_type='process.calculation.calcjob.CalcJobNode.', - user_id=self.default_user.id, - attributes={ - 'process_state': 'created', - } - ) - node_process_active.save() - self.node_process_active_id = node_process_active.id - - # This is a legacy process that does not even have a `process_state` - node_process_legacy = self.DbNode( - node_type='process.calculation.calcfunction.CalcFunctionNode.', user_id=self.default_user.id, attributes={} - ) - node_process_legacy.save() - self.node_process_legacy_id = node_process_legacy.id - - # Note that `Data` nodes should not have these attributes in real databases but the migration explicitly - # excludes data nodes, which is what this test is verifying, by checking they are not deleted - node_data = self.DbNode( - node_type='data.core.dict.Dict.', - user_id=self.default_user.id, - ) - node_data.save() - self.node_data_id = node_data.id - - def test_data_migrated(self): - """Verify that the correct attributes are removed.""" - node_process = self.load_node(self.node_process_id) - self.assertEqual(node_process.attributes['sealed'], True) - - node_process_active = self.load_node(self.node_process_active_id) - self.assertNotIn('sealed', node_process_active.attributes) - - node_process_legacy = self.load_node(self.node_process_legacy_id) - self.assertEqual(node_process_legacy.attributes['sealed'], True) - - node_data = self.load_node(self.node_data_id) - self.assertNotIn('sealed', node_data.attributes) diff --git a/tests/backends/aiida_django/migrations/test_migrations_0043_default_link_label.py b/tests/backends/aiida_django/migrations/test_migrations_0043_default_link_label.py deleted file mode 100644 index 668cf78693..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0043_default_link_label.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
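[Editor's note] Migration 0041 then backfills `sealed` on any process node that still lacks it, including legacy nodes that never had a `process_state`, while active processes and data nodes are skipped. A predicate capturing that decision (again with an assumed active-state set):

ACTIVE_STATES = {'created', 'running', 'waiting'}  # assumed, as above

def needs_seal(node_type, attributes):
    """Return True when migration 0041 should backfill ``sealed`` on this node."""
    return (
        node_type.startswith('process.')
        and 'sealed' not in attributes
        and attributes.get('process_state') not in ACTIVE_STATES  # legacy nodes lack the key entirely
    )

assert needs_seal('process.calculation.calcfunction.CalcFunctionNode.', {})
assert not needs_seal('process.calculation.calcjob.CalcJobNode.', {'process_state': 'created'})
assert not needs_seal('data.core.dict.Dict.', {})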
#
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module,invalid-name
-"""Tests for the migration that renames the default output link label."""
-
-from .test_migrations_common import TestMigrations
-
-
-class TestDefaultLinkLabelMigration(TestMigrations):
-    """Test the migration that renames the default output link label `_return` to `result`."""
-
-    migrate_from = '0042_prepare_schema_reset'
-    migrate_to = '0043_default_link_label'
-
-    def setUpBeforeMigration(self):
-        node_process = self.DbNode(
-            node_type='process.calculation.calcjob.CalcJobNode.',
-            user_id=self.default_user.id,
-        )
-        node_process.save()
-        self.node_process_id = node_process.id
-
-        node_data = self.DbNode(
-            node_type='data.core.dict.Dict.',
-            user_id=self.default_user.id,
-        )
-        node_data.save()
-        self.node_data_id = node_data.id
-
-        link = self.DbLink(input=node_data, output=node_process, type='input', label='_return')
-        link.save()
-
-    def test_data_migrated(self):
-        """Verify that the link label has been renamed."""
-        node = self.load_node(self.node_data_id)
-        link = self.DbLink.objects.get(input=node)
-        self.assertEqual(link.label, 'result')
diff --git a/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py b/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py
deleted file mode 100644
index 75b754d4c9..0000000000
--- a/tests/backends/aiida_django/migrations/test_migrations_0044_dbgroup_type_string.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
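[Editor's note] The rename tested above is a single-label substitution; the actual migration applies it in bulk on the link table. As a sketch of the rule the test exercises:

def migrate_link_label(label):
    """Rename the hard-coded default output label; everything else passes through."""
    return 'result' if label == '_return' else label

assert migrate_link_label('_return') == 'result'
assert migrate_link_label('my_output') == 'my_output'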
#
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-# pylint: disable=import-error,no-name-in-module,invalid-name
-"""Test migration of `type_string` after the `Group` class became pluggable."""
-
-from .test_migrations_common import TestMigrations
-
-
-class TestGroupTypeStringMigration(TestMigrations):
-    """Test migration of `type_string` after the `Group` class became pluggable."""
-
-    migrate_from = '0043_default_link_label'
-    migrate_to = '0044_dbgroup_type_string'
-
-    def setUpBeforeMigration(self):
-        DbGroup = self.apps.get_model('db', 'DbGroup')
-
-        # test user group type_string: 'user' -> 'core'
-        group_user = DbGroup(label='01', user_id=self.default_user.id, type_string='user')
-        group_user.save()
-        self.group_user_pk = group_user.pk
-
-        # test data.upf group type_string: 'data.upf' -> 'core.upf'
-        group_data_upf = DbGroup(label='02', user_id=self.default_user.id, type_string='data.upf')
-        group_data_upf.save()
-        self.group_data_upf_pk = group_data_upf.pk
-
-        # test auto.import group type_string: 'auto.import' -> 'core.import'
-        group_autoimport = DbGroup(label='03', user_id=self.default_user.id, type_string='auto.import')
-        group_autoimport.save()
-        self.group_autoimport_pk = group_autoimport.pk
-
-        # test auto.run group type_string: 'auto.run' -> 'core.auto'
-        group_autorun = DbGroup(label='04', user_id=self.default_user.id, type_string='auto.run')
-        group_autorun.save()
-        self.group_autorun_pk = group_autorun.pk
-
-    def test_group_string_update(self):
-        """Test that the type_string values were updated correctly."""
-        DbGroup = self.apps.get_model('db', 'DbGroup')
-
-        # 'user' -> 'core'
-        group_user = DbGroup.objects.get(pk=self.group_user_pk)
-        self.assertEqual(group_user.type_string, 'core')
-
-        # 'data.upf' -> 'core.upf'
-        group_data_upf = DbGroup.objects.get(pk=self.group_data_upf_pk)
-        self.assertEqual(group_data_upf.type_string, 'core.upf')
-
-        # 'auto.import' -> 'core.import'
-        group_autoimport = DbGroup.objects.get(pk=self.group_autoimport_pk)
-        self.assertEqual(group_autoimport.type_string, 'core.import')
-
-        # 'auto.run' -> 'core.auto'
-        group_autorun = DbGroup.objects.get(pk=self.group_autorun_pk)
-        self.assertEqual(group_autorun.type_string, 'core.auto')
diff --git a/tests/backends/aiida_django/migrations/test_migrations_0045_dbgroup_extras.py b/tests/backends/aiida_django/migrations/test_migrations_0045_dbgroup_extras.py
deleted file mode 100644
index f9c1686ff1..0000000000
--- a/tests/backends/aiida_django/migrations/test_migrations_0045_dbgroup_extras.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.
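[Editor's note] The four renames checked by this test form a plain lookup; any `type_string` not listed keeps its value. As a table-plus-fallback sketch:

# The renames exercised by the test above, as a lookup table.
TYPE_STRING_MAP_0044 = {
    'user': 'core',
    'data.upf': 'core.upf',
    'auto.import': 'core.import',
    'auto.run': 'core.auto',
}

def migrate_group_type_string(type_string):
    """Return the post-0044 type_string; unknown values pass through unchanged."""
    return TYPE_STRING_MAP_0044.get(type_string, type_string)

assert migrate_group_type_string('auto.run') == 'core.auto'
assert migrate_group_type_string('core.custom') == 'core.custom'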
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Test migration to add the `extras` JSONB column to the `DbGroup` model.""" - -from .test_migrations_common import TestMigrations - - -class TestGroupExtrasMigration(TestMigrations): - """Test migration to add the `extras` JSONB column to the `DbGroup` model.""" - - migrate_from = '0044_dbgroup_type_string' - migrate_to = '0045_dbgroup_extras' - - def setUpBeforeMigration(self): - DbGroup = self.apps.get_model('db', 'DbGroup') - - group = DbGroup(label='01', user_id=self.default_user.id, type_string='user') - group.save() - self.group_pk = group.pk - - def test_extras(self): - """Test that the model now has an extras column with empty dictionary as default.""" - DbGroup = self.apps.get_model('db', 'DbGroup') - - group = DbGroup.objects.get(pk=self.group_pk) - self.assertEqual(group.extras, {}) diff --git a/tests/backends/aiida_django/migrations/test_migrations_0046_add_node_repository_metadata.py b/tests/backends/aiida_django/migrations/test_migrations_0046_add_node_repository_metadata.py deleted file mode 100644 index 34708a4c0f..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0046_add_node_repository_metadata.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Test migration adding the `repository_metadata` column to the `Node` model.""" - -from .test_migrations_common import TestMigrations - - -class TestNodeRepositoryMetadataMigration(TestMigrations): - """Test migration adding the `repository_metadata` column to the `Node` model.""" - - migrate_from = '0045_dbgroup_extras' - migrate_to = '0046_add_node_repository_metadata' - - def setUpBeforeMigration(self): - DbNode = self.apps.get_model('db', 'DbNode') - dbnode = DbNode(user_id=self.default_user.id) - dbnode.save() - self.node_pk = dbnode.pk - - def test_migration(self): - """Test that the column is added and null by default.""" - DbNode = self.apps.get_model('db', 'DbNode') - node = DbNode.objects.get(pk=self.node_pk) - assert hasattr(node, 'repository_metadata') - assert node.repository_metadata == {} diff --git a/tests/backends/aiida_django/migrations/test_migrations_0047_migrate_repository.py b/tests/backends/aiida_django/migrations/test_migrations_0047_migrate_repository.py deleted file mode 100644 index 33d3a1cd05..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0047_migrate_repository.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
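[Editor's note] Both migrations tested here (the group `extras` column and the node `repository_metadata` column) boil down to adding a JSON column whose default is an empty dictionary. A minimal sketch of what such a schema migration looks like in Django, illustrative only; the actual AiiDA migration files differ and historically used the Postgres-specific JSONField:

from django.db import migrations, models

class Migration(migrations.Migration):
    """Sketch of a JSON-column addition; not the actual AiiDA migration file."""

    dependencies = [('db', '0044_dbgroup_type_string')]

    operations = [
        migrations.AddField(
            model_name='dbgroup',
            name='extras',
            field=models.JSONField(default=dict),  # empty dict default, as the test asserts
        ),
    ]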
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Test migration of the old file repository to the disk object store.""" -import hashlib -import os - -from aiida.backends.general.migrations import utils - -from .test_migrations_common import TestMigrations - -REPOSITORY_UUID_KEY = 'repository|uuid' - - -class TestRepositoryMigration(TestMigrations): - """Test migration of the old file repository to the disk object store.""" - - migrate_from = '0046_add_node_repository_metadata' - migrate_to = '0047_migrate_repository' - - def setUpBeforeMigration(self): - DbNode = self.apps.get_model('db', 'DbNode') - DbSetting = self.apps.get_model('db', 'DbSetting') - - dbnode_01 = DbNode(user_id=self.default_user.id) - dbnode_01.save() - dbnode_02 = DbNode(user_id=self.default_user.id) - dbnode_02.save() - dbnode_03 = DbNode(user_id=self.default_user.id) - dbnode_03.save() - dbnode_04 = DbNode(user_id=self.default_user.id) - dbnode_04.save() - - self.node_01_pk = dbnode_01.pk - self.node_02_pk = dbnode_02.pk - self.node_03_pk = dbnode_03.pk - self.node_04_pk = dbnode_04.pk - - utils.put_object_from_string(dbnode_01.uuid, 'sub/path/file_b.txt', 'b') - utils.put_object_from_string(dbnode_01.uuid, 'sub/file_a.txt', 'a') - utils.put_object_from_string(dbnode_02.uuid, 'output.txt', 'output') - utils.put_object_from_string(dbnode_04.uuid, '.gitignore', 'test') - - # If both `path` and `raw_input` subfolders are present and `.gitignore` is in `path`, it should be ignored. - # Cannot use `put_object_from_string` here as it statically writes under the `path` folder. - raw_input_sub_folder = utils.get_node_repository_sub_folder(dbnode_04.uuid, subfolder='raw_input') - os.makedirs(raw_input_sub_folder, exist_ok=True) - with open(os.path.join(raw_input_sub_folder, 'input.txt'), 'w', encoding='utf-8') as handle: - handle.write('input') - - # When multiple migrations are ran, it is possible that migration 0047 is run at a point where the repository - # container does not have a UUID (at that point in the migration) and so the setting gets set to `None`. This - # should only happen during testing, and in this case we delete it first so the actual migration gets to set it. 
- if DbSetting.objects.filter(key=REPOSITORY_UUID_KEY).exists(): - DbSetting.objects.get(key=REPOSITORY_UUID_KEY).delete() - - def test_migration(self): - """Test that the files are correctly migrated.""" - DbNode = self.apps.get_model('db', 'DbNode') - DbSetting = self.apps.get_model('db', 'DbSetting') - - node_01 = DbNode.objects.get(pk=self.node_01_pk) - node_02 = DbNode.objects.get(pk=self.node_02_pk) - node_03 = DbNode.objects.get(pk=self.node_03_pk) - node_04 = DbNode.objects.get(pk=self.node_04_pk) - - assert node_01.repository_metadata == { - 'o': { - 'sub': { - 'o': { - 'path': { - 'o': { - 'file_b.txt': { - 'k': hashlib.sha256('b'.encode('utf-8')).hexdigest() - } - } - }, - 'file_a.txt': { - 'k': hashlib.sha256('a'.encode('utf-8')).hexdigest() - } - } - } - } - } - assert node_02.repository_metadata == { - 'o': { - 'output.txt': { - 'k': hashlib.sha256('output'.encode('utf-8')).hexdigest() - } - } - } - assert node_03.repository_metadata == {} - assert node_04.repository_metadata == { - 'o': { - 'input.txt': { - 'k': hashlib.sha256('input'.encode('utf-8')).hexdigest() - } - } - } - - for hashkey, content in ( - (node_01.repository_metadata['o']['sub']['o']['path']['o']['file_b.txt']['k'], b'b'), - (node_01.repository_metadata['o']['sub']['o']['file_a.txt']['k'], b'a'), - (node_02.repository_metadata['o']['output.txt']['k'], b'output'), - (node_04.repository_metadata['o']['input.txt']['k'], b'input'), - ): - assert utils.get_repository_object(hashkey) == content - - repository_uuid = DbSetting.objects.get(key=REPOSITORY_UUID_KEY) - assert repository_uuid is not None - assert isinstance(repository_uuid.val, str) diff --git a/tests/backends/aiida_django/migrations/test_migrations_0048_computer_name_to_label.py b/tests/backends/aiida_django/migrations/test_migrations_0048_computer_name_to_label.py deleted file mode 100644 index 916b1dd789..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0048_computer_name_to_label.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
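[Editor's note] The nested `repository_metadata` structure asserted above is a virtual file hierarchy: directories become `'o'` (objects) mappings and files become `'k'` entries holding the SHA-256 hash key under which their content lives in the disk object store. A small builder that reproduces exactly the shapes the test expects (an illustrative helper, not AiiDA code):

import hashlib

def build_repository_metadata(objects):
    """Build a virtual-hierarchy metadata dict from a mapping of relative paths to bytes."""
    metadata = {}
    for path, content in objects.items():
        parts = path.split('/')
        level = metadata
        for part in parts[:-1]:  # descend through directories, creating 'o' mappings
            level = level.setdefault('o', {}).setdefault(part, {})
        level.setdefault('o', {})[parts[-1]] = {'k': hashlib.sha256(content).hexdigest()}
    return metadata

assert build_repository_metadata({'output.txt': b'output'}) == {
    'o': {'output.txt': {'k': hashlib.sha256(b'output').hexdigest()}}
}

Running it on {'sub/path/file_b.txt': b'b', 'sub/file_a.txt': b'a'} yields the nested structure asserted for node_01 above.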
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Test migration that renames the ``name`` column of the ``Computer`` entity to ``label``.""" -from .test_migrations_common import TestMigrations - - -class TestMigration(TestMigrations): - """Test migration that renames the ``name`` column of the ``Computer`` entity to ``label``.""" - - migrate_from = '0047_migrate_repository' - migrate_to = '0048_computer_name_to_label' - - def setUpBeforeMigration(self): - DbComputer = self.apps.get_model('db', 'DbComputer') - - computer = DbComputer(name='testing') - computer.save() - self.computer_pk = computer.pk - - def test_migration(self): - """Test that the migration was performed correctly.""" - DbComputer = self.apps.get_model('db', 'DbComputer') - - computer = DbComputer.objects.get(pk=self.computer_pk) - assert computer.label == 'testing' diff --git a/tests/backends/aiida_django/migrations/test_migrations_0049_entry_point_core_prefix.py b/tests/backends/aiida_django/migrations/test_migrations_0049_entry_point_core_prefix.py deleted file mode 100644 index 98b510c186..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_0049_entry_point_core_prefix.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -"""Test migration that updates node types after `core.` prefix was added to entry point names.""" -from .test_migrations_common import TestMigrations - - -class TestMigration(TestMigrations): - """Test migration that updates node types after `core.` prefix was added to entry point names.""" - - migrate_from = '0048_computer_name_to_label' - migrate_to = '0049_entry_point_core_prefix' - - def setUpBeforeMigration(self): - DbComputer = self.apps.get_model('db', 'DbComputer') - DbNode = self.apps.get_model('db', 'DbNode') - - computer = DbComputer(label='testing', scheduler_type='direct', transport_type='local') - computer.save() - self.computer_pk = computer.pk - - calcjob = DbNode( - user_id=self.default_user.id, - process_type='aiida.calculations:core.arithmetic.add', - attributes={'parser_name': 'core.arithmetic.add'} - ) - calcjob.save() - self.calcjob_pk = calcjob.pk - - workflow = DbNode(user_id=self.default_user.id, process_type='aiida.workflows:arithmetic.add_multiply') - workflow.save() - self.workflow_pk = workflow.pk - - def test_migration(self): - """Test that the migration was performed correctly.""" - DbComputer = self.apps.get_model('db', 'DbComputer') - DbNode = self.apps.get_model('db', 'DbNode') - - computer = DbComputer.objects.get(pk=self.computer_pk) - assert computer.scheduler_type == 'core.direct' - assert computer.transport_type == 'core.local' - - calcjob = DbNode.objects.get(pk=self.calcjob_pk) - assert calcjob.process_type == 
'aiida.calculations:core.arithmetic.add' - assert calcjob.attributes['parser_name'] == 'core.arithmetic.add' - - workflow = DbNode.objects.get(pk=self.workflow_pk) - assert workflow.process_type == 'aiida.workflows:core.arithmetic.add_multiply' diff --git a/tests/backends/aiida_django/migrations/test_migrations_common.py b/tests/backends/aiida_django/migrations/test_migrations_common.py deleted file mode 100644 index 0784750bd3..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_common.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=import-error,no-name-in-module,invalid-name -""" The basic functionality for the migration tests""" -from django.apps import apps -from django.db import connection -from django.db.migrations.executor import MigrationExecutor - -from aiida.backends.testbase import AiidaTestCase -from aiida.common.utils import Capturing - - -class TestMigrations(AiidaTestCase): - """ - This is the common test class that is used by all migration tests. It migrates to a given - migration point, allows you to set up the database & AiiDA at that point with the necessary - data and migrates then to the final migration point. - In the end it forwards the database at the final migration (as it should be and found before - the migration tests). - """ - - @property - def app(self): - return apps.get_containing_app_config('aiida.backends.djsite.db').name.split('.')[-1] - - migrate_from = None - migrate_to = None - - def setUp(self): - """Go to a specific schema version before running tests.""" - from aiida.backends.djsite import get_scoped_session - from aiida.orm import autogroup - - self.current_autogroup = autogroup.CURRENT_AUTOGROUP - autogroup.CURRENT_AUTOGROUP = None - assert self.migrate_from and self.migrate_to, \ - f"TestCase '{type(self).__name__}' must define migrate_from and migrate_to properties" - self.migrate_from = [(self.app, self.migrate_from)] - self.migrate_to = [(self.app, self.migrate_to)] - executor = MigrationExecutor(connection) - self.apps = executor.loader.project_state(self.migrate_from).apps - self.schema_editor = connection.schema_editor() - - # Before running the migration, make sure we close the querybuilder session which may still contain references - # to objects whose mapping may be invalidated after resetting the schema to an older version. This can block - # the migrations so we first expunge those objects by closing the session. 
- get_scoped_session().close() - - # Reverse to the original migration - with Capturing(): - executor.migrate(self.migrate_from) - - self.DbLink = self.apps.get_model('db', 'DbLink') - self.DbNode = self.apps.get_model('db', 'DbNode') - self.DbUser = self.apps.get_model('db', 'DbUser') - self.DbUser.objects.all().delete() - self.default_user = self.DbUser(1, 'aiida@localhost') - self.default_user.save() - - try: - self.setUpBeforeMigration() - # Run the migration to test - executor = MigrationExecutor(connection) - executor.loader.build_graph() - - with Capturing(): - executor.migrate(self.migrate_to) - - self.apps = executor.loader.project_state(self.migrate_to).apps - except Exception: - # Bring back the DB to the correct state if this setup part fails - import traceback - traceback.print_stack() - self._revert_database_schema() - raise - - def tearDown(self): - """At the end make sure we go back to the latest schema version.""" - from aiida.orm import autogroup - self._revert_database_schema() - autogroup.CURRENT_AUTOGROUP = self.current_autogroup - - def setUpBeforeMigration(self): - """Anything to do before running the migrations, which should be implemented in test subclasses.""" - - def _revert_database_schema(self): - """Bring back the DB to the correct state.""" - from aiida.backends.djsite.db.migrations import LATEST_MIGRATION - self.migrate_to = [(self.app, LATEST_MIGRATION)] - executor = MigrationExecutor(connection) - with Capturing(): - executor.migrate(self.migrate_to) - - def load_node(self, pk): - return self.DbNode.objects.get(pk=pk) diff --git a/tests/backends/aiida_django/migrations/test_migrations_many.py b/tests/backends/aiida_django/migrations/test_migrations_many.py deleted file mode 100644 index 220e1500d6..0000000000 --- a/tests/backends/aiida_django/migrations/test_migrations_many.py +++ /dev/null @@ -1,843 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -# pylint: disable=invalid-name -""" -This file contains the majority of the migration tests that are too short to -go to a separate file. 
-""" -import numpy - -from aiida.backends.djsite.db.migrations import ModelModifierV0025 -from aiida.backends.general.migrations import utils -from aiida.backends.testbase import AiidaTestCase -from aiida.common.exceptions import IntegrityError - -from .test_migrations_common import TestMigrations - - -class TestMigrationsModelModifierV0025(TestMigrations): - """Sub class of `TestMigrations` that need to work on node attributes using the `ModelModifierV0025`.""" - - def set_attribute(self, node, key, value): - DbAttribute = self.apps.get_model('db', 'DbAttribute') - modifier = ModelModifierV0025(self.apps, DbAttribute) - modifier.set_value_for_node(node.pk, key, value) - - def get_attribute(self, node, key, default=None): # pylint: disable=missing-docstring - DbAttribute = self.apps.get_model('db', 'DbAttribute') - modifier = ModelModifierV0025(self.apps, DbAttribute) - try: - return modifier.get_value_for_node(node.pk, key) - except AttributeError: - return default - - @staticmethod - def get_node_array(node, name): - return utils.load_numpy_array_from_repository(node.uuid, name) - - def set_node_array(self, node, name, array): - """Store a new numpy array inside a node. Possibly overwrite the array if it already existed. - - Internally, it stores a name.npy file in numpy format. - - :param name: The name of the array. - :param array: The numpy array to store. - """ - utils.store_numpy_array_in_repository(node.uuid, name, array) - self.set_attribute(node, f'array|{name}', list(array.shape)) - - -class TestNoMigrations(AiidaTestCase): - """Verify that no django migrations remain.""" - - def test_no_remaining_migrations(self): # pylint: disable=no-self-use - """ - Verify that no django migrations remain. - Equivalent to python manage.py makemigrations --check - """ - - from django.core.management import call_command - - # Raises SystemExit, if migrations remain - call_command('makemigrations', '--check', verbosity=0) - - -class TestDuplicateNodeUuidMigration(TestMigrations): - """Test the migration that verifies that there are no duplicate UUIDs""" - - migrate_from = '0013_django_1_8' - migrate_to = '0014_add_node_uuid_unique_constraint' - - def setUpBeforeMigration(self): - from aiida.backends.general.migrations.duplicate_uuids import deduplicate_uuids, verify_uuid_uniqueness - from aiida.common.utils import get_new_uuid - self.file_name = 'test.temp' - self.file_content = '#!/bin/bash\n\necho test run\n' - - self.nodes_boolean = [] - self.nodes_integer = [] - self.n_bool_duplicates = 2 - self.n_int_duplicates = 4 - - node_bool = self.DbNode(type='data.core.bool.Bool.', user_id=self.default_user.id, uuid=get_new_uuid()) - node_bool.save() - - node_int = self.DbNode(type='data.core.int.Int.', user_id=self.default_user.id, uuid=get_new_uuid()) - node_int.save() - - self.nodes_boolean.append(node_bool) - self.nodes_integer.append(node_int) - - for _ in range(self.n_bool_duplicates): - node = self.DbNode(type='data.core.bool.Bool.', user_id=self.default_user.id, uuid=node_bool.uuid) - node.save() - utils.put_object_from_string(node.uuid, self.file_name, self.file_content) - self.nodes_boolean.append(node) - - for _ in range(self.n_int_duplicates): - node = self.DbNode(type='data.core.int.Int.', user_id=self.default_user.id, uuid=node_int.uuid) - node.save() - utils.put_object_from_string(node.uuid, self.file_name, self.file_content) - self.nodes_integer.append(node) - - # Verify that there are duplicate UUIDs by checking that the following function raises - with 
self.assertRaises(IntegrityError): - verify_uuid_uniqueness(table='db_dbnode') - - # Now run the function responsible for solving duplicate UUIDs which would also be called by the user - # through the `verdi database integrity detect-duplicate-uuid` command - deduplicate_uuids(table='db_dbnode', dry_run=False) - - def test_deduplicated_uuids(self): - """Verify that after the migration, all expected nodes are still there with unique UUIDs.""" - # If the duplicate UUIDs were successfully fixed, the following should not raise. - from aiida.backends.general.migrations.duplicate_uuids import verify_uuid_uniqueness - - verify_uuid_uniqueness(table='db_dbnode') - - # Reload the nodes by PK and check that all UUIDs are now unique - nodes_boolean = [self.load_node(node.pk) for node in self.nodes_boolean] - uuids_boolean = [node.uuid for node in nodes_boolean] - self.assertEqual(len(set(uuids_boolean)), len(nodes_boolean)) - - nodes_integer = [self.load_node(node.pk) for node in self.nodes_integer] - uuids_integer = [node.uuid for node in nodes_integer] - self.assertEqual(len(set(uuids_integer)), len(nodes_integer)) - - for node in nodes_boolean: - self.assertEqual(utils.get_object_from_repository(node.uuid, self.file_name), self.file_content) - - -class TestUuidMigration(TestMigrations): - """ - This test class checks the migration 0018_django_1_11 which switches from the django_extensions - UUID field to the native UUIDField of django 1.11. It also introduces unique constraints - on all uuid columns (previously existed only on dbnode). - """ - - migrate_from = '0017_drop_dbcalcstate' - migrate_to = '0018_django_1_11' - - def setUpBeforeMigration(self): - node = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id) - node.save() - - self.node_uuid = str(node.uuid) - self.node_id = node.id - - def test_uuid_untouched(self): - """Verify that Node uuids remain unchanged.""" - node = self.load_node(self.node_id) - self.assertEqual(self.node_uuid, str(node.uuid)) - - -class TestGroupRenamingMigration(TestMigrations): - """ - This test class checks the migration 0022_dbgroup_type_string_change_content which updates the - type_string column of the groups. 
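[Editor's note] The deduplication run in `setUpBeforeMigration` above keeps the first occurrence of each UUID and hands every later duplicate a fresh one, so the node count is preserved while uniqueness is restored. In sketch form (the idea behind `deduplicate_uuids`, not its implementation):

import uuid

def deduplicate(rows):
    """Give every row after the first occurrence of a UUID a fresh one.

    ``rows`` is a list of ``{'pk': ..., 'uuid': ...}`` dicts.
    """
    seen = set()
    for row in rows:
        if row['uuid'] in seen:
            row['uuid'] = str(uuid.uuid4())
        seen.add(row['uuid'])
    return rows

rows = deduplicate([{'pk': 1, 'uuid': 'abc'}, {'pk': 2, 'uuid': 'abc'}])
assert rows[0]['uuid'] != rows[1]['uuid']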
- """ - - migrate_from = '0021_dbgroup_name_to_label_type_to_type_string' - migrate_to = '0022_dbgroup_type_string_change_content' - - def setUpBeforeMigration(self): - # Create group - DbGroup = self.apps.get_model('db', 'DbGroup') - - # test user group type_string: '' -> 'user' - group_user = DbGroup(label='test_user_group', user_id=self.default_user.id, type_string='') - group_user.save() - self.group_user_pk = group_user.pk - - # test data.upf group type_string: 'data.upf.family' -> 'data.upf' - group_data_upf = DbGroup( - label='test_data_upf_group', user_id=self.default_user.id, type_string='data.upf.family' - ) - group_data_upf.save() - self.group_data_upf_pk = group_data_upf.pk - - # test auto.import group type_string: 'aiida.import' -> 'auto.import' - group_autoimport = DbGroup(label='test_import_group', user_id=self.default_user.id, type_string='aiida.import') - group_autoimport.save() - self.group_autoimport_pk = group_autoimport.pk - - # test auto.run group type_string: 'autogroup.run' -> 'auto.run' - group_autorun = DbGroup(label='test_autorun_group', user_id=self.default_user.id, type_string='autogroup.run') - group_autorun.save() - self.group_autorun_pk = group_autorun.pk - - def test_group_string_update(self): - """ Test that the type_string were updated correctly """ - DbGroup = self.apps.get_model('db', 'DbGroup') - - # test user group type_string: '' -> 'user' - group_user = DbGroup.objects.get(pk=self.group_user_pk) - self.assertEqual(group_user.type_string, 'user') - - # test data.upf group type_string: 'data.upf.family' -> 'data.upf' - group_data_upf = DbGroup.objects.get(pk=self.group_data_upf_pk) - self.assertEqual(group_data_upf.type_string, 'data.upf') - - # test auto.import group type_string: 'aiida.import' -> 'auto.import' - group_autoimport = DbGroup.objects.get(pk=self.group_autoimport_pk) - self.assertEqual(group_autoimport.type_string, 'auto.import') - - # test auto.run group type_string: 'autogroup.run' -> 'auto.run' - group_autorun = DbGroup.objects.get(pk=self.group_autorun_pk) - self.assertEqual(group_autorun.type_string, 'auto.run') - - -class TestCalcAttributeKeysMigration(TestMigrationsModelModifierV0025): - """ - This test class checks that the migration 0023_calc_job_option_attribute_keys works as expected - which migrates CalcJobNode attributes for metadata options whose key changed. 
- """ - - migrate_from = '0022_dbgroup_type_string_change_content' - migrate_to = '0023_calc_job_option_attribute_keys' - - KEY_RESOURCES_OLD = 'jobresource_params' - KEY_RESOURCES_NEW = 'resources' - KEY_PARSER_NAME_OLD = 'parser' - KEY_PARSER_NAME_NEW = 'parser_name' - KEY_PROCESS_LABEL_OLD = '_process_label' - KEY_PROCESS_LABEL_NEW = 'process_label' - KEY_ENVIRONMENT_VARIABLES_OLD = 'custom_environment_variables' - KEY_ENVIRONMENT_VARIABLES_NEW = 'environment_variables' - - def setUpBeforeMigration(self): - self.process_label = 'TestLabel' - self.resources = {'number_machines': 1} - self.environment_variables = {} - self.parser_name = 'aiida.parsers:parser' - - self.node_work = self.DbNode(type='node.process.workflow.WorkflowNode.', user_id=self.default_user.id) - self.node_work.save() - self.set_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, self.process_label) - - self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id) - self.node_calc.save() - self.set_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, self.process_label) - self.set_attribute(self.node_calc, self.KEY_RESOURCES_OLD, self.resources) - self.set_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables) - self.set_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, self.parser_name) - - # Create a node of a different type to ensure that its attributes are not updated - self.node_other = self.DbNode(type='node.othernode.', user_id=self.default_user.id) - self.node_other.save() - self.set_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD, self.process_label) - self.set_attribute(self.node_other, self.KEY_RESOURCES_OLD, self.resources) - self.set_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables) - self.set_attribute(self.node_other, self.KEY_PARSER_NAME_OLD, self.parser_name) - - def test_attribute_key_changes(self): - """Verify that the keys are successfully changed of the affected attributes.""" - NOT_FOUND = tuple([0]) - - self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_NEW), self.process_label) - self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND) - - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_NEW), self.process_label) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_NEW), self.resources) - self.assertEqual( - self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_NEW), self.environment_variables - ) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_NEW), self.parser_name) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_OLD, default=NOT_FOUND), NOT_FOUND) - self.assertEqual( - self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, default=NOT_FOUND), NOT_FOUND - ) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, default=NOT_FOUND), NOT_FOUND) - - # The following node should not be migrated even if its attributes have the matching keys because - # the node is not a ProcessNode - self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD), self.process_label) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_OLD), self.resources) - self.assertEqual( - 
self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD), self.environment_variables - ) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_OLD), self.parser_name) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_NEW, default=NOT_FOUND), NOT_FOUND) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_NEW, default=NOT_FOUND), NOT_FOUND) - self.assertEqual( - self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_NEW, default=NOT_FOUND), NOT_FOUND - ) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_NEW, default=NOT_FOUND), NOT_FOUND) - - -class TestDbLogMigrationRecordCleaning(TestMigrations): - """ - This test class checks that the migration 0024_dblog_update works as expected. - That migration updates of the DbLog table and adds uuids - """ - - migrate_from = '0023_calc_job_option_attribute_keys' - migrate_to = '0024_dblog_update' - - def setUpBeforeMigration(self): # pylint: disable=too-many-locals - import importlib - import json - - from aiida.backends.general.migrations.utils import dumps_json - - update_024 = importlib.import_module('aiida.backends.djsite.db.migrations.0024_dblog_update') - - DbNode = self.apps.get_model('db', 'DbNode') - DbWorkflow = self.apps.get_model('db', 'DbWorkflow') - DbLog = self.apps.get_model('db', 'DbLog') - - # Creating the needed nodes & workflows - calc_1 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id) - param = DbNode(type='data.core.dict.Dict.', user_id=self.default_user.id) - leg_workf = DbWorkflow(label='Legacy WorkflowNode', user_id=self.default_user.id) - calc_2 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id) - - # Storing them - calc_1.save() - param.save() - leg_workf.save() - calc_2.save() - - # Creating the corresponding log records and storing them - log_1 = DbLog( - loggername='CalculationNode logger', - objpk=calc_1.pk, - objname='node.calculation.job.quantumespresso.pw.', - message='calculation node 1', - metadata=json.dumps({ - 'msecs': 719.0849781036377, - 'objpk': calc_1.pk, - 'lineno': 350, - 'thread': 140011612940032, - 'asctime': '10/21/2018 12:39:51 PM', - 'created': 1540118391.719085, - 'levelno': 23, - 'message': 'calculation node 1', - 'objname': 'node.calculation.job.quantumespresso.pw.', - }) - ) - log_2 = DbLog( - loggername='something.else logger', - objpk=param.pk, - objname='something.else.', - message='parameter data with log message' - ) - log_3 = DbLog( - loggername='TopologicalWorkflow logger', - objpk=leg_workf.pk, - objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow', - message='parameter data with log message' - ) - log_4 = DbLog( - loggername='CalculationNode logger', - objpk=calc_2.pk, - objname='node.calculation.job.quantumespresso.pw.', - message='calculation node 2', - metadata=json.dumps({ - 'msecs': 719.0849781036377, - 'objpk': calc_2.pk, - 'lineno': 360, - 'levelno': 23, - 'message': 'calculation node 1', - 'objname': 'node.calculation.job.quantumespresso.pw.', - }) - ) - # Creating two more log records that don't correspond to a node - log_5 = DbLog( - loggername='CalculationNode logger', - objpk=(calc_2.pk + 1000), - objname='node.calculation.job.quantumespresso.pw.', - message='calculation node 1000', - metadata=json.dumps({ - 'msecs': 718, - 'objpk': (calc_2.pk + 1000), - 'lineno': 361, - 'levelno': 25, - 'message': 'calculation node 1000', - 'objname': 
'node.calculation.job.quantumespresso.pw.', - }) - ) - log_6 = DbLog( - loggername='CalculationNode logger', - objpk=(calc_2.pk + 1001), - objname='node.calculation.job.quantumespresso.pw.', - message='calculation node 10001', - metadata=json.dumps({ - 'msecs': 722, - 'objpk': (calc_2.pk + 1001), - 'lineno': 362, - 'levelno': 24, - 'message': 'calculation node 1001', - 'objname': 'node.calculation.job.quantumespresso.pw.', - }) - ) - - # Storing the log records - log_1.save() - log_2.save() - log_3.save() - log_4.save() - log_5.save() - log_6.save() - - # Storing temporarily information needed for the check at the test - self.to_check = {} - - # Keeping calculation & calculation log ids - self.to_check['CalculationNode'] = ( - calc_1.pk, - log_1.pk, - calc_2.pk, - log_4.pk, - ) - - # Getting the serialized Dict logs - param_data = DbLog.objects.filter(objpk=param.pk).filter(objname='something.else.' - ).values(*update_024.values_to_export)[:1] - serialized_param_data = dumps_json(list(param_data)) - # Getting the serialized logs for the unknown entity logs (as the export migration fuction - # provides them) - this should coincide to the above - serialized_unknown_exp_logs = update_024.get_serialized_unknown_entity_logs(self.schema_editor) - # Getting their number - unknown_exp_logs_number = update_024.get_unknown_entity_log_number(self.schema_editor) - self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, unknown_exp_logs_number) - - # Getting the serialized legacy workflow logs - leg_wf = DbLog.objects.filter(objpk=leg_workf.pk).filter( - objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow' - ).values(*update_024.values_to_export)[:1] - serialized_leg_wf_logs = dumps_json(list(leg_wf)) - # Getting the serialized logs for the legacy workflow logs (as the export migration function - # provides them) - this should coincide to the above - serialized_leg_wf_exp_logs = update_024.get_serialized_legacy_workflow_logs(self.schema_editor) - eg_wf_exp_logs_number = update_024.get_legacy_workflow_log_number(self.schema_editor) - self.to_check['WorkflowNode'] = (serialized_leg_wf_logs, serialized_leg_wf_exp_logs, eg_wf_exp_logs_number) - - # Getting the serialized logs that don't correspond to a DbNode record - logs_no_node = DbLog.objects.filter(id__in=[log_5.id, log_6.id]).values(*update_024.values_to_export) - serialized_logs_no_node = dumps_json(list(logs_no_node)) - # Getting the serialized logs that don't correspond to a node (as the export migration function - # provides them) - this should coincide to the above - serialized_logs_exp_no_node = update_024.get_serialized_logs_with_no_nodes(self.schema_editor) - logs_no_node_number = update_024.get_logs_with_no_nodes_number(self.schema_editor) - self.to_check['NoNode'] = (serialized_logs_no_node, serialized_logs_exp_no_node, logs_no_node_number) - - def tearDown(self): - """Cleaning the DbLog, DbUser, DbWorkflow and DbNode records""" - DbUser = self.apps.get_model('db', 'DbUser') - DbNode = self.apps.get_model('db', 'DbNode') - DbWorkflow = self.apps.get_model('db', 'DbWorkflow') - DbLog = self.apps.get_model('db', 'DbLog') - - DbLog.objects.all().delete() - DbNode.objects.all().delete() # pylint: disable=no-member - DbWorkflow.objects.all().delete() # pylint: disable=no-member - DbUser.objects.all().delete() # pylint: disable=no-member - super().tearDown() - - def test_dblog_calculation_node(self): - """ - Verify that after the migration there is only two log records left and verify that they 
corresponds to - the CalculationNodes. - """ - DbLog = self.apps.get_model('db', 'DbLog') - - # Check that only two log records exist - self.assertEqual(DbLog.objects.count(), 2, 'There should be two log records left') - - # Get the node id of the log record referencing the node and verify that it is the correct one - dbnode_id_1 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][1] - ).values('dbnode_id')[:1].get()['dbnode_id'] - self.assertEqual(dbnode_id_1, self.to_check['CalculationNode'][0], 'referenced node is not the expected one') - dbnode_id_2 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][3] - ).values('dbnode_id')[:1].get()['dbnode_id'] - self.assertEqual(dbnode_id_2, self.to_check['CalculationNode'][2], 'referenced node is not the expected one') - - def test_dblog_correct_export_of_logs(self): - """ - Verify that export log methods for legacy workflows, unknown entities and log records that - don't correspond to nodes, work as expected - """ - import json - - self.assertEqual(self.to_check['Dict'][0], self.to_check['Dict'][1]) - self.assertEqual(self.to_check['Dict'][2], 1) - - self.assertEqual(self.to_check['WorkflowNode'][0], self.to_check['WorkflowNode'][1]) - self.assertEqual(self.to_check['WorkflowNode'][2], 1) - - self.assertEqual( - sorted(list(json.loads(self.to_check['NoNode'][0])), key=lambda k: k['id']), - sorted(list(json.loads(self.to_check['NoNode'][1])), key=lambda k: k['id']) - ) - self.assertEqual(self.to_check['NoNode'][2], 2) - - def test_dblog_unique_uuids(self): - """ - Verify that the UUIDs of the log records are unique - """ - DbLog = self.apps.get_model('db', 'DbLog') - - l_uuids = list(_['uuid'] for _ in DbLog.objects.values('uuid')) - s_uuids = set(l_uuids) - self.assertEqual(len(l_uuids), len(s_uuids), 'The UUIDs are not all unique.') - - def test_metadata_correctness(self): - """ - Verify that the metadata of the remaining records don't have an objpk and objmetadata values. - """ - import json - - DbLog = self.apps.get_model('db', 'DbLog') - - metadata = list(json.loads(_['metadata']) for _ in DbLog.objects.values('metadata')) - # Verify that the objpk and objname are no longer part of the metadata - for m_res in metadata: - self.assertNotIn('objpk', m_res.keys(), 'objpk should not exist any more in metadata') - self.assertNotIn('objname', m_res.keys(), 'objname should not exist any more in metadata') - - -class TestDbLogMigrationBackward(TestMigrations): - """ - Check that backward migrations work also for the DbLog migration(s). 
- """ - - migrate_from = '0024_dblog_update' - migrate_to = '0023_calc_job_option_attribute_keys' - - def setUpBeforeMigration(self): - import json - - DbNode = self.apps.get_model('db', 'DbNode') - DbLog = self.apps.get_model('db', 'DbLog') - - # Creating the needed nodes & workflows - calc_1 = DbNode(type='node.process.calculation.CalculationNode.1', user_id=self.default_user.id) - calc_2 = DbNode(type='node.process.calculation.CalculationNode.2', user_id=self.default_user.id) - - # Storing them - calc_1.save() - calc_2.save() - - # Creating the corresponding log records and storing them - log_1 = DbLog( - loggername='CalculationNode logger', - dbnode_id=calc_1.pk, - message='calculation node 1', - metadata=json.dumps({ - 'msecs': 719.0849781036377, - 'lineno': 350, - 'thread': 140011612940032, - 'asctime': '10/21/2018 12:39:51 PM', - 'created': 1540118391.719085, - 'levelno': 23, - 'message': 'calculation node 1', - }) - ) - log_2 = DbLog( - loggername='CalculationNode logger', - dbnode_id=calc_2.pk, - message='calculation node 2', - metadata=json.dumps({ - 'msecs': 719.0849781036377, - 'lineno': 360, - 'levelno': 23, - 'message': 'calculation node 1', - }) - ) - - # Storing the log records - log_1.save() - log_2.save() - - # Keeping what is needed to be verified at the test - self.to_check = {} - self.to_check[log_1.pk] = (log_1.dbnode_id, calc_1.type) - self.to_check[log_2.pk] = (log_2.dbnode_id, calc_2.type) - - def test_objpk_objname(self): - """ - This test verifies that the objpk and objname have the right values - after a forward and a backward migration. - """ - import json - DbLog = self.apps.get_model('db', 'DbLog') - - # Check that only two log records exist with the correct objpk objname - for log_pk, to_check_value in self.to_check.items(): - log_entry = DbLog.objects.filter(pk=log_pk)[:1].get() - log_dbnode_id, node_type = to_check_value - self.assertEqual( - log_dbnode_id, log_entry.objpk, - 'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of ' - 'the 0023 schema version.'.format(log_dbnode_id, log_entry.objpk) - ) - self.assertEqual( - node_type, log_entry.objname, - 'The type ({}) of the linked node of the 0024 schema version should be identical to the ' - 'objname ({}) of the 0023 schema version.'.format(node_type, log_entry.objname) - ) - self.assertEqual( - log_dbnode_id, - json.loads(log_entry.metadata)['objpk'], - 'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of ' - 'the 0023 schema version stored in the metadata.'.format( - log_dbnode_id, - json.loads(log_entry.metadata)['objpk'] - ) - ) - self.assertEqual( - node_type, - json.loads(log_entry.metadata)['objname'], - 'The type ({}) of the linked node of the 0024 schema version should be identical to the ' - 'objname ({}) of the 0023 schema version stored in the metadata.'.format( - node_type, - json.loads(log_entry.metadata)['objname'] - ) - ) - - -class TestDataMoveWithinNodeMigration(TestMigrations): - """ - Check that backward migrations work also for the DbLog migration(s). 
- """ - - migrate_from = '0024_dblog_update' - migrate_to = '0025_move_data_within_node_module' - - def setUpBeforeMigration(self): - self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id) - self.node_data = self.DbNode(type='data.int.Int.', user_id=self.default_user.id) - self.node_calc.save() - self.node_data.save() - - def test_data_type_string(self): - """Verify that type string of the Data node was successfully adapted.""" - node_calc = self.load_node(self.node_calc.id) - node_data = self.load_node(self.node_data.id) - self.assertEqual(node_data.type, 'node.data.int.Int.') - self.assertEqual(node_calc.type, 'node.process.calculation.calcjob.CalcJobNode.') - - -class TestTrajectoryDataMigration(TestMigrationsModelModifierV0025): - """ - This test class checks that the migrations 0026_trajectory_symbols_to_attribute and - 0027_delete_trajectory_symbols_array work as expected. - These are data migrations for `TrajectoryData` nodes where symbol lists are moved - from repository array to attributes. - """ - - migrate_from = '0025_move_data_within_node_module' - migrate_to = '0027_delete_trajectory_symbols_array' - - stepids = numpy.array([60, 70]) - times = stepids * 0.01 - positions = numpy.array([[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]], - [[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]]]) - velocities = numpy.array([[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], - [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]]]) - cells = numpy.array([[[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]], [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]]) - - def setUpBeforeMigration(self): - symbols = numpy.array(['H', 'O', 'C']) - self.node = self.DbNode(type='node.data.array.trajectory.TrajectoryData.', user_id=self.default_user.id) - self.node.save() - - self.set_node_array(self.node, 'steps', self.stepids) - self.set_node_array(self.node, 'cells', self.cells) - self.set_node_array(self.node, 'symbols', symbols) - self.set_node_array(self.node, 'positions', self.positions) - self.set_node_array(self.node, 'times', self.times) - self.set_node_array(self.node, 'velocities', self.velocities) - - def test_trajectory_symbols(self): - """ Check that the trajectories are migrated correctly """ - node = self.load_node(self.node.id) - self.assertSequenceEqual(self.get_attribute(node, 'symbols'), ['H', 'O', 'C']) - self.assertSequenceEqual(self.get_node_array(node, 'velocities').tolist(), self.velocities.tolist()) - self.assertSequenceEqual(self.get_node_array(node, 'positions').tolist(), self.positions.tolist()) - with self.assertRaises(IOError): - self.get_node_array(node, 'symbols') - - -class TestNodePrefixRemovalMigration(TestMigrations): - """ - This test class checks that the migration 0028_remove_node_prefix works as expected. 
- - That is the final data migration for `Nodes` after `aiida.orm.nodes` reorganization - was finalized to remove the `node.` prefix - """ - - migrate_from = '0027_delete_trajectory_symbols_array' - migrate_to = '0028_remove_node_prefix' - - def setUpBeforeMigration(self): - self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id) - self.node_data = self.DbNode(type='node.data.int.Int.', user_id=self.default_user.id) - self.node_calc.save() - self.node_data.save() - - def test_data_node_type_string(self): - """Verify that type string of the nodes was successfully adapted.""" - node_calc = self.load_node(self.node_calc.id) - node_data = self.load_node(self.node_data.id) - - self.assertEqual(node_data.type, 'data.int.Int.') - self.assertEqual(node_calc.type, 'process.calculation.calcjob.CalcJobNode.') - - -class TestParameterDataToDictMigration(TestMigrations): - """ - This test class checks that the migration 0029_rename_parameter_data_to_dict works as expected. - - This is a data migration for the renaming of `ParameterData` to `Dict`. - """ - - migrate_from = '0028_remove_node_prefix' - migrate_to = '0029_rename_parameter_data_to_dict' - - def setUpBeforeMigration(self): - self.node = self.DbNode(type='data.parameter.ParameterData.', user_id=self.default_user.id) - self.node.save() - - def test_data_node_type_string(self): - """Verify that type string of the nodes was successfully adapted.""" - node = self.load_node(self.node.id) - self.assertEqual(node.type, 'data.dict.Dict.') - - -class TestTextFieldToJSONFieldMigration(TestMigrations): # pylint: disable=too-many-instance-attributes - """ - This test class checks that the migration 0033_replace_text_field_with_json_field works as expected. - - That migration replaces the use of text fields to store JSON data with builtin JSONFields. 
- """ - - migrate_from = '0032_remove_legacy_workflows' - migrate_to = '0033_replace_text_field_with_json_field' - - def setUpBeforeMigration(self): - from aiida.common import json - - self.DbNode = self.apps.get_model('db', 'DbNode') - self.DbComputer = self.apps.get_model('db', 'DbComputer') - self.DbAuthInfo = self.apps.get_model('db', 'DbAuthInfo') - self.DbLog = self.apps.get_model('db', 'DbLog') - - self.node = self.DbNode(node_type='node.process.calculation.CalculationNode.', user_id=self.default_user.id) - self.node.save() - - self.computer_metadata = { - 'shebang': '#!/bin/bash', - 'workdir': '/scratch/', - 'append_text': '', - 'prepend_text': '', - 'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'], - 'default_mpiprocs_per_machine': 1 - } - self.computer_kwargs = { - 'name': 'localhost_testing', - 'hostname': 'localhost', - 'transport_type': 'core.local', - 'scheduler_type': 'core.direct', - 'metadata': json.dumps(self.computer_metadata), - } - self.computer = self.DbComputer(**self.computer_kwargs) - self.computer.save() - - self.auth_info_auth_params = {'safe_interval': 2} - self.auth_info_metadata = {'safe_interval': 2} - self.auth_info_kwargs = { - 'aiidauser_id': self.default_user.pk, - 'dbcomputer': self.computer, - 'auth_params': json.dumps(self.auth_info_auth_params), - 'metadata': json.dumps(self.auth_info_metadata), - } - self.auth_info = self.DbAuthInfo(**self.auth_info_kwargs) - self.auth_info.save() - - self.log_metadata = { - 'msecs': 719.0849781036377, - 'lineno': 350, - 'thread': 140011612940032, - 'asctime': '10/21/2018 12:39:51 PM', - 'created': 1540118391.719085, - 'levelno': 23, - 'message': 'calculation node 1', - } - self.log_kwargs = { - 'loggername': 'localhost', - 'levelname': 'localhost', - 'dbnode_id': self.node.id, - 'metadata': json.dumps(self.log_metadata) - } - self.log = self.DbLog(**self.log_kwargs) - self.log.save() - - def test_text_field_to_json_field_migration(self): - """Verify that the values in the text fields were maintained after migrating the field to JSONField.""" - # Reload the objects to make sure the new data is loaded - computer = self.DbComputer.objects.get(pk=self.computer.id) - auth_info = self.DbAuthInfo.objects.get(pk=self.auth_info.id) - log = self.DbLog.objects.get(pk=self.log.id) - - # Make sure that the migrated data matches the original - self.assertDictEqual(computer.metadata, self.computer_metadata) - self.assertDictEqual(auth_info.metadata, self.auth_info_metadata) - self.assertDictEqual(auth_info.auth_params, self.auth_info_auth_params) - self.assertDictEqual(log.metadata, self.log_metadata) - - -class TestResetHash(TestMigrations): - """ - This test class checks that only the hash extra is removed. 
- """ - - migrate_from = '0038_data_migration_legacy_job_calculations' - migrate_to = '0039_reset_hash' - - def setUpBeforeMigration(self): - self.node = self.DbNode( - node_type='process.calculation.calcjob.CalcJobNode.', - user_id=self.default_user.id, - extras={ - 'something': 123, - '_aiida_hash': 'abcd' - } - ) - self.node.save() - - def test_data_migrated(self): - """Verify that type string of the nodes was successfully adapted.""" - node = self.load_node(self.node.id) - extras = node.extras - self.assertEqual(extras.get('something'), 123) # Other extras should be untouched - self.assertNotIn('_aiida_hash', extras) # The hash extra should have been removed diff --git a/tests/backends/aiida_django/test_manager.py b/tests/backends/aiida_django/test_manager.py deleted file mode 100644 index 16f0c8c838..0000000000 --- a/tests/backends/aiida_django/test_manager.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Tests for the django backend manager.""" - -from aiida.backends.djsite.manager import DjangoSettingsManager -from aiida.backends.testbase import AiidaTestCase -from aiida.common import exceptions - - -class TestDjangoSettingsManager(AiidaTestCase): - """Test the DjangoSettingsManager class and its methods.""" - - def setUp(self): - super().setUp() - self.settings_manager = DjangoSettingsManager() - - def test_set_get(self): - """Test the get and set methods.""" - temp_key = 'temp_setting' - temp_value = 'Valuable value' - temp_description = 'Temporary value for testing' - - self.settings_manager.set(temp_key, temp_value, temp_description) - self.assertEqual(self.settings_manager.get(temp_key).value, temp_value) - self.assertEqual(self.settings_manager.get(temp_key).description, temp_description) - - non_existent_key = 'I_dont_exist' - - with self.assertRaises(exceptions.NotExistent): - self.settings_manager.get(non_existent_key) - - def test_delete(self): - """Test the delete method.""" - temp_key = 'temp_setting' - temp_value = 'Valuable value' - - self.settings_manager.set(temp_key, temp_value) - self.settings_manager.delete(temp_key) - - non_existent_key = 'I_dont_exist' - - with self.assertRaises(exceptions.NotExistent): - self.settings_manager.delete(non_existent_key) diff --git a/tests/backends/aiida_sqlalchemy/conftest.py b/tests/backends/aiida_sqlalchemy/conftest.py index 7ece28d211..7faf903e80 100644 --- a/tests/backends/aiida_sqlalchemy/conftest.py +++ b/tests/backends/aiida_sqlalchemy/conftest.py @@ -8,9 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Configuration file for pytest tests.""" - -from aiida.backends import BACKEND_SQLA from aiida.manage.tests import get_test_backend_name -if get_test_backend_name() != BACKEND_SQLA: +if get_test_backend_name() != 'psql_dos': collect_ignore_glob = ['*'] # pylint: disable=invalid-name diff --git a/tests/backends/aiida_sqlalchemy/migrations/conftest.py b/tests/backends/aiida_sqlalchemy/migrations/conftest.py index 8a1cfbea66..e93d83977e 100644 --- 
a/tests/backends/aiida_sqlalchemy/migrations/conftest.py +++ b/tests/backends/aiida_sqlalchemy/migrations/conftest.py @@ -8,86 +8,165 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests for the migration engine (Alembic) as well as for the AiiDA migrations for SQLAlchemy.""" -from contextlib import contextmanager -from typing import Iterator +from uuid import uuid4 +from pgtest.pgtest import PGTest import pytest -from sqlalchemy.orm import Session - -from aiida.backends.sqlalchemy.manager import SqlaBackendManager - - -class Migrator: - """A class to yield from the ``perform_migrations`` fixture.""" - - def __init__(self, backend, manager: SqlaBackendManager) -> None: - self.backend = backend - self._manager = manager - - def migrate_up(self, revision: str) -> None: - """Migrate up to a given revision.""" - self._manager.migrate_up(revision) - if revision != 'head': - assert self._manager.get_schema_version_backend() == revision - - def migrate_down(self, revision: str) -> None: - """Migrate down to a given revision.""" - self._manager.migrate_down(revision) - assert self._manager.get_schema_version_backend() == revision - - def get_current_table(self, table_name): - """ - Return a Model instantiated at the correct migration. - Note that this is obtained by inspecting the database and not - by looking into the models file. - So, special methods possibly defined in the models files/classes are not present. - - For instance, you can do:: - - DbGroup = self.get_current_table('db_dbgroup') - - :param table_name: the name of the table. - """ - from alembic.migration import MigrationContext # pylint: disable=import-error - from sqlalchemy.ext.automap import automap_base # pylint: disable=import-error,no-name-in-module - - with self.backend.get_session().bind.begin() as connection: - context = MigrationContext.configure(connection) - bind = context.bind - - base = automap_base() - # reflect the tables - base.prepare(autoload_with=bind.engine) - - return getattr(base.classes, table_name) - - @contextmanager - def session(self) -> Iterator[Session]: - """A context manager for a new session.""" - with self.backend.get_session().bind.begin() as connection: - session = Session(connection.engine, future=True) - try: - yield session - except Exception: - session.rollback() - raise - finally: - session.close() +from sqlalchemy import text + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.backends.sqlalchemy.utils import create_sqlalchemy_engine +from aiida.manage.configuration import Profile + + +@pytest.fixture(scope='session') +def empty_pg_cluster(): + """Create an empty PostgreSQL cluster, for the duration of the session.""" + pg_cluster = PGTest() + yield pg_cluster + pg_cluster.close() + + +@pytest.fixture +def uninitialised_profile(empty_pg_cluster: PGTest, tmp_path): # pylint: disable=redefined-outer-name + """Create a profile attached to an empty database and repository folder.""" + import psycopg2 + from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT + + database_name = f'test_{uuid4().hex}' + + with psycopg2.connect(**empty_pg_cluster.dsn) as conn: + conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + with conn.cursor() as cursor: + cursor.execute(f"CREATE DATABASE {database_name} ENCODING 'utf8';") + + yield Profile( + 'test_migrate', { + 'storage': { + 'backend': 'psql_dos', + 'config': { + 'database_engine': 'postgresql_psycopg2', + 'database_port': 
empty_pg_cluster.port,
+                    'database_hostname': empty_pg_cluster.dsn['host'],
+                    'database_name': database_name,
+                    'database_password': '',
+                    'database_username': empty_pg_cluster.username,
+                    'repository_uri': f'file:///{tmp_path}',
+                }
+            },
+            'process_control': {
+                'backend': 'null',
+                'config': {}
+            }
+        }
+    )
+
+    with psycopg2.connect(**empty_pg_cluster.dsn) as conn:
+        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
+        with conn.cursor() as cursor:
+            # note after postgresql 13 you can use 'DROP DATABASE name WITH (FORCE)'
+            # but for now, we first close all possible open connections to the database, before dropping it
+            # see: https://dba.stackexchange.com/questions/11893/force-drop-db-while-others-may-be-connected
+            cursor.execute(f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '{database_name}';")
+            cursor.execute(f'DROP DATABASE {database_name};')

 @pytest.fixture()
-def perform_migrations(aiida_profile, backend, request):
-    """A fixture to setup the database for migration tests"""
-    # note downgrading to 1830c8430131 requires adding columns to `DbUser` and hangs if a user is present
-    aiida_profile.reset_db(with_user=False)
-    migrator = Migrator(backend, SqlaBackendManager())
-    marker = request.node.get_closest_marker('migrate_down')
-    if marker is not None:
-        assert marker.args, 'No version given'
-        migrator.migrate_down(marker.args[0])
-    yield migrator
-    # clear the database
-    # note this assumes the current schema contains the tables specified in `clean_db`
-    aiida_profile.reset_db(with_user=False)
-    # ensure that the database is migrated back up to the latest version, once finished
-    migrator.migrate_up('head')
+def perform_migrations(uninitialised_profile):  # pylint: disable=redefined-outer-name
+    """A fixture to setup a database for migration tests."""
+    yield PsqlDostoreMigrator(uninitialised_profile)
+
+
+def _generate_column_schema(profile: Profile) -> dict:
+    """Create a dict containing all tables and fields of AiiDA tables."""
+    # see https://www.postgresql.org/docs/9.1/infoschema-columns.html
+    with create_sqlalchemy_engine(profile.storage_config).connect() as conn:
+        table_data = conn.execute(
+            text(
+                'SELECT table_name,column_name,data_type,is_nullable,column_default,character_maximum_length '
+                'FROM information_schema.columns '
+                "WHERE table_schema = 'public' AND table_name LIKE 'db_%';"
+            )
+        )
+        data = {}
+        for tbl_name, col_name, data_type, is_nullable, column_default, char_max_length in table_data:
+            data.setdefault(tbl_name, {})[col_name] = {
+                'data_type': data_type,
+                'is_nullable': is_nullable.upper() == 'YES',
+                'default': column_default,
+            }
+            if char_max_length:
+                data[tbl_name][col_name]['max_length'] = char_max_length
+
+    return data
+
+
+def _generate_constraints_schema(profile: Profile) -> dict:
+    """Create a dict containing constraints of AiiDA tables."""
+    # see https://www.postgresql.org/docs/9.1/catalog-pg-constraint.html
+    data = {}
+    for cname, ctype in (('primary_key', 'p'), ('unique', 'u')):
+        with create_sqlalchemy_engine(profile.storage_config).connect() as conn:
+            constraints_data = conn.execute(
+                text(
+                    'SELECT tbl.relname,c.conname,ARRAY_AGG(a.attname) FROM pg_constraint AS c '
+                    'INNER JOIN pg_class AS tbl ON tbl.oid = c.conrelid '
+                    'INNER JOIN pg_attribute AS a ON a.attrelid = c.conrelid AND a.attnum = ANY(c.conkey) '
+                    f"WHERE c.contype='{ctype}' AND tbl.relname LIKE 'db_%' "
+                    'GROUP BY tbl.relname,c.conname;'
+                )
+            )
+            for tbl_name, name, col_names in sorted(constraints_data):
+                data.setdefault(cname, {}).setdefault(tbl_name, {})[name] = sorted(col_names)
+    return data
+
+
+def _generate_fkey_schema(profile: Profile) -> dict:
+    """Create a dict containing foreign keys of AiiDA tables."""
+    # see https://www.postgresql.org/docs/9.1/catalog-pg-constraint.html
+    data = {}
+    with create_sqlalchemy_engine(profile.storage_config).connect() as conn:
+        constraints_data = conn.execute(
+            text(
+                'SELECT conrelid::regclass,conname, pg_get_constraintdef(oid) FROM pg_constraint '
+                "WHERE contype='f' AND conrelid::regclass::text LIKE 'db_%' "
+                "AND connamespace='public'::regnamespace "
+                'ORDER BY conrelid::regclass::text, contype DESC;'
+            )
+        )
+        for tbl_name, name, description in sorted(constraints_data):
+            data.setdefault(tbl_name, {})[name] = description
+    return data
+
+
+def _generate_index_schema(profile: Profile) -> dict:
+    """Create a dict containing indexes of AiiDA tables."""
+    # see https://www.postgresql.org/docs/9.1/view-pg-indexes.html
+    with create_sqlalchemy_engine(profile.storage_config).connect() as conn:
+        index_data = conn.execute(
+            text(
+                'SELECT tablename,indexname,indexdef FROM pg_indexes '
+                "WHERE tablename LIKE 'db_%' "
+                'ORDER BY tablename,indexname;'
+            )
+        )
+        data = {}
+        for tbl_name, name, definition in sorted(index_data):
+            data.setdefault(tbl_name, {})[name] = definition
+    return data
+
+
+@pytest.fixture
+def reflect_schema():
+    """A fixture to generate the schema of AiiDA tables for a given profile."""
+
+    def _generate_schema(profile: Profile) -> dict:
+        """Create a dict containing all tables and fields of AiiDA tables."""
+        return {
+            'columns': _generate_column_schema(profile),
+            'constraints': _generate_constraints_schema(profile),
+            'foreign_keys': _generate_fkey_schema(profile),
+            'indexes': _generate_index_schema(profile),
+        }
+
+    return _generate_schema
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/__init__.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0024_dblog_update.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0024_dblog_update.py
new file mode 100644
index 0000000000..1d007ef2fe
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0024_dblog_update.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test the update to the ``DbLog`` table.""" +import json + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_dblog_update(perform_migrations: PsqlDostoreMigrator): # pylint: disable=too-many-locals + """Test the update to the ``DbLog`` table.""" + # starting revision + perform_migrations.migrate_up('django@django_0023') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + wf_model = perform_migrations.get_current_table('db_dbworkflow') + log_model = perform_migrations.get_current_table('db_dblog') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + password='', + is_superuser=False, + is_staff=False, + is_active=True, + last_login=timezone.now(), + date_joined=timezone.now(), + ) + session.add(user) + session.commit() + node_kwargs = dict( + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + nodeversion=1, + public=True, + ) + calc_1 = node_model(uuid=get_new_uuid(), type='node.process.calculation.CalculationNode.', **node_kwargs) + calc_2 = node_model(uuid=get_new_uuid(), type='node.process.calculation.CalculationNode.', **node_kwargs) + param = node_model(uuid=get_new_uuid(), type='data.core.dict.Dict.', **node_kwargs) + session.add_all([calc_1, calc_2, param]) + session.commit() + leg_workf = wf_model( + label='Legacy WorkflowNode', + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + module='', + module_class='', + lastsyncedversion=1, + nodeversion=1, + report='', + script_md5='', + script_path='', + state='', + description='', + ) + session.add(leg_workf) + session.commit() + + # Creating the corresponding log records + log_1 = log_model( + loggername='CalculationNode logger', + levelname='INFO', + time=timezone.now(), + objpk=calc_1.id, + objname='node.calculation.job.quantumespresso.pw.', + message='calculation node 1', + metadata=json.dumps({ + 'msecs': 719.0849781036377, + 'objpk': calc_1.id, + 'lineno': 350, + 'thread': 140011612940032, + 'asctime': '10/21/2018 12:39:51 PM', + 'created': 1540118391.719085, + 'levelno': 23, + 'message': 'calculation node 1', + 'objname': 'node.calculation.job.quantumespresso.pw.', + }) + ) + log_2 = log_model( + loggername='something.else logger', + levelname='INFO', + time=timezone.now(), + objpk=param.id, + objname='something.else.', + message='parameter data with log message', + metadata='{}' + ) + log_3 = log_model( + loggername='TopologicalWorkflow logger', + levelname='INFO', + time=timezone.now(), + objpk=leg_workf.id, + objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow', + message='parameter data with log message', + metadata='{}' + ) + log_4 = log_model( + loggername='CalculationNode logger', + levelname='INFO', + time=timezone.now(), + objpk=calc_2.id, + objname='node.calculation.job.quantumespresso.pw.', + message='calculation node 2', + metadata=json.dumps({ + 'msecs': 719.0849781036377, + 'objpk': calc_2.id, + 
'lineno': 360, + 'levelno': 23, + 'message': 'calculation node 1', + 'objname': 'node.calculation.job.quantumespresso.pw.', + }) + ) + # Creating two more log records that don't correspond to a node + log_5 = log_model( + loggername='CalculationNode logger', + levelname='INFO', + time=timezone.now(), + objpk=(calc_2.id + 1000), + objname='node.calculation.job.quantumespresso.pw.', + message='calculation node 1000', + metadata=json.dumps({ + 'msecs': 718, + 'objpk': (calc_2.id + 1000), + 'lineno': 361, + 'levelno': 25, + 'message': 'calculation node 1000', + 'objname': 'node.calculation.job.quantumespresso.pw.', + }) + ) + log_6 = log_model( + loggername='CalculationNode logger', + levelname='INFO', + time=timezone.now(), + objpk=(calc_2.id + 1001), + objname='node.calculation.job.quantumespresso.pw.', + message='calculation node 10001', + metadata=json.dumps({ + 'msecs': 722, + 'objpk': (calc_2.id + 1001), + 'lineno': 362, + 'levelno': 24, + 'message': 'calculation node 1001', + 'objname': 'node.calculation.job.quantumespresso.pw.', + }) + ) + session.add_all([log_1, log_2, log_3, log_4, log_5, log_6]) + session.commit() + + log_1_id = log_1.id + log_4_id = log_4.id + + # final revision + perform_migrations.migrate_up('django@django_0024') + + log_model = perform_migrations.get_current_table('db_dblog') + with perform_migrations.session() as session: + logs = session.query(log_model).all() + # verify that there are only two log records left + assert len(logs) == 2 + # verify that they correspond to the CalculationNodes + assert {log.id for log in logs} == {log_1_id, log_4_id} + # check the uuid's are unique + assert len({log.uuid for log in logs}) == 2 diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0026_0027_traj_data.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0026_0027_traj_data.py new file mode 100644 index 0000000000..df64fb33a5 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0026_0027_traj_data.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test `TrajectoryData` nodes migration, moving symbol lists from repository array to attributes.""" +import numpy +import pytest + +from aiida.backends.sqlalchemy.migrations.utils import utils +from aiida.backends.sqlalchemy.migrations.utils.create_dbattribute import create_rows +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_traj_data(perform_migrations: PsqlDostoreMigrator): + """Test `TrajectoryData` nodes migration, moving symbol lists from repository array to attributes.""" + # starting revision + perform_migrations.migrate_up('django@django_0025') + + repo_path = perform_migrations.profile.repository_path + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + attr_model = perform_migrations.get_current_table('db_dbattribute') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + password='', + is_superuser=False, + is_staff=False, + is_active=True, + last_login=timezone.now(), + date_joined=timezone.now(), + ) + session.add(user) + session.commit() + kwargs = dict( + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + nodeversion=1, + public=True, + ) + node = node_model(uuid=get_new_uuid(), type='node.data.array.trajectory.TrajectoryData.', **kwargs) + session.add(node) + session.commit() + node_id = node.id + node_uuid = node.uuid + + name = 'symbols' + array = numpy.array(['H', 'O', 'C']) + + utils.store_numpy_array_in_repository(repo_path, node.uuid, name, array) + array_key = f'array|{name}' + array_shape = list(array.shape) + rows = create_rows(array_key, array_shape, node_id) + session.add_all([attr_model(**row) for row in rows]) + session.commit() + + # final revision + perform_migrations.migrate_up('django@django_0027') + + # it should no longer be in the repository + with pytest.raises(IOError): + utils.load_numpy_array_from_repository(repo_path, node_uuid, name) + + # and instead, it should be in the attributes + attr_model = perform_migrations.get_current_table('db_dbattribute') + with perform_migrations.session() as session: + rows = session.query(attr_model).filter(attr_model.key.startswith('array|')).order_by(attr_model.key).all() + assert len(rows) == 0 + rows = session.query(attr_model).filter(attr_model.key.startswith(name)).order_by(attr_model.key).all() + data = [{x.name: getattr(row, x.name) for x in row.__table__.columns if x.name != 'id'} for row in rows] + assert data == [{ + 'datatype': 'list', + 'dbnode_id': node_id, + 'key': 'symbols', + 'bval': None, + 'ival': 3, + 'fval': None, + 'tval': '', + 'dval': None + }, { + 'datatype': 'txt', + 'dbnode_id': node_id, + 'key': 'symbols.0', + 'bval': None, + 'ival': None, + 'fval': None, + 'tval': 'H', + 'dval': None + }, { + 'datatype': 'txt', + 'dbnode_id': node_id, + 'key': 'symbols.1', + 'bval': None, + 'ival': None, + 'fval': None, + 'tval': 'O', + 'dval': None + }, { + 'datatype': 'txt', + 'dbnode_id': node_id, + 'key': 'symbols.2', + 'bval': None, + 
'ival': None,
+        'fval': None,
+        'tval': 'C',
+        'dval': None
+    }]
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0028_0029_node_type.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0028_0029_node_type.py
new file mode 100644
index 0000000000..cedd36474c
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0028_0029_node_type.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Test alterations to `db_dbnode.type` values."""
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
+from aiida.common import timezone
+from aiida.common.utils import get_new_uuid
+
+
+def test_node_type(perform_migrations: PsqlDostoreMigrator):
+    """Test the removal of the `node.` prefix (0028) and the rename of `ParameterData` to `Dict` (0029)."""
+    # starting revision
+    perform_migrations.migrate_up('django@django_0027')
+
+    # setup the database
+    user_model = perform_migrations.get_current_table('db_dbuser')
+    node_model = perform_migrations.get_current_table('db_dbnode')
+    with perform_migrations.session() as session:
+        user = user_model(
+            email='user@aiida.net',
+            first_name='John',
+            last_name='Doe',
+            institution='EPFL',
+            password='',
+            is_superuser=False,
+            is_staff=False,
+            is_active=True,
+            last_login=timezone.now(),
+            date_joined=timezone.now(),
+        )
+        session.add(user)
+        session.commit()
+        kwargs = dict(
+            user_id=user.id,
+            ctime=timezone.now(),
+            mtime=timezone.now(),
+            label='test',
+            description='',
+            nodeversion=1,
+            public=True,
+        )
+        node_calc = node_model(uuid=get_new_uuid(), type='node.process.calculation.calcjob.CalcJobNode.', **kwargs)
+        node_data = node_model(uuid=get_new_uuid(), type='node.data.int.Int.', **kwargs)
+        node_paramdata = node_model(uuid=get_new_uuid(), type='node.data.parameter.ParameterData.', **kwargs)
+        session.add_all((node_calc, node_data, node_paramdata))
+        session.commit()
+        node_calc_id = node_calc.id
+        node_data_id = node_data.id
+        node_paramdata_id = node_paramdata.id
+
+    # final revision
+    perform_migrations.migrate_up('django@django_0029')
+
+    node_model = perform_migrations.get_current_table('db_dbnode')
+    with perform_migrations.session() as session:
+        node_calc = session.get(node_model, node_calc_id)
+        assert node_calc.type == 'process.calculation.calcjob.CalcJobNode.'
+        node_data = session.get(node_model, node_data_id)
+        assert node_data.type == 'data.int.Int.'
+        node_paramdata = session.get(node_model, node_paramdata_id)
+        assert node_paramdata.type == 'data.dict.Dict.'
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0032_remove_legacy_workflows.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0032_remove_legacy_workflows.py
new file mode 100644
index 0000000000..aefee23b34
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0032_remove_legacy_workflows.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Test removing legacy workflows."""
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
+from aiida.common import timezone
+from aiida.common.utils import get_new_uuid
+
+
+def test_remove_legacy_workflows(perform_migrations: PsqlDostoreMigrator):
+    """Test removing legacy workflows."""
+    # starting revision
+    perform_migrations.migrate_up('django@django_0031')
+
+    # setup the database
+    user_model = perform_migrations.get_current_table('db_dbuser')
+    node_model = perform_migrations.get_current_table('db_dbnode')
+    wf_model = perform_migrations.get_current_table('db_dbworkflow')
+    wfdata_model = perform_migrations.get_current_table('db_dbworkflowdata')
+    wfstep_model = perform_migrations.get_current_table('db_dbworkflowstep')
+    with perform_migrations.session() as session:
+        user = user_model(
+            email='user@aiida.net',
+            first_name='John',
+            last_name='Doe',
+            institution='EPFL',
+            password='',
+            is_superuser=False,
+            is_staff=False,
+            is_active=True,
+            last_login=timezone.now(),
+            date_joined=timezone.now(),
+        )
+        session.add(user)
+        session.commit()
+        node_calc = node_model(
+            uuid=get_new_uuid(),
+            node_type='node.process.calculation.calcjob.CalcJobNode.',
+            user_id=user.id,
+            ctime=timezone.now(),
+            mtime=timezone.now(),
+            label='test',
+            description='',
+            nodeversion=1,
+            public=True,
+        )
+        session.add(node_calc)
+        session.commit()
+        workflow = wf_model(
+            label='Legacy WorkflowNode',
+            uuid=get_new_uuid(),
+            user_id=user.id,
+            ctime=timezone.now(),
+            mtime=timezone.now(),
+            module='',
+            module_class='',
+            lastsyncedversion=1,
+            nodeversion=1,
+            report='',
+            script_md5='',
+            script_path='',
+            state='',
+            description='',
+        )
+        session.add(workflow)
+        session.commit()
+        workflow_data = wfdata_model(
+            parent_id=workflow.id,
+            aiida_obj_id=node_calc.id,
+            time=timezone.now(),
+            name='',
+            data_type='dict',
+            value_type='dict',
+            json_value='{}',
+        )
+        session.add(workflow_data)
+        session.commit()
+        workflow_step = wfstep_model(
+            user_id=user.id,
+            parent_id=workflow.id,
+            time=timezone.now(),
+            name='',
+            nextcall='',
+            state='',
+        )
+        session.add(workflow_step)
+        session.commit()
+
+    # final revision
+    perform_migrations.migrate_up('django@django_0032')
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0033_replace_text_field_with_json_field.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0033_replace_text_field_with_json_field.py
new file mode 100644
index 0000000000..d3fd301177
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0033_replace_text_field_with_json_field.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Test replacing the use of text fields to store JSON data with JSONB fields."""
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
+from aiida.common import json, timezone
+from aiida.common.utils import get_new_uuid
+
+
+def test_text_to_jsonb(perform_migrations: PsqlDostoreMigrator):  # pylint: disable=too-many-locals
+    """Test replacing the use of text fields to store JSON data with JSONB fields.
+
+    `db_dbauthinfo.auth_params`, `db_dbauthinfo.metadata`,
+    `db_dbauthinfo.transport_params`, `db_dbcomputer.metadata`,
+    `db_dblog.metadata`
+    """
+    # starting revision
+    perform_migrations.migrate_up('django@django_0032')
+
+    # setup the database
+    user_model = perform_migrations.get_current_table('db_dbuser')
+    node_model = perform_migrations.get_current_table('db_dbnode')
+    computer_model = perform_migrations.get_current_table('db_dbcomputer')
+    authinfo_model = perform_migrations.get_current_table('db_dbauthinfo')
+    log_model = perform_migrations.get_current_table('db_dblog')
+    with perform_migrations.session() as session:
+        user = user_model(
+            email='user@aiida.net',
+            first_name='John',
+            last_name='Doe',
+            institution='EPFL',
+            password='',
+            is_superuser=False,
+            is_staff=False,
+            is_active=True,
+            last_login=timezone.now(),
+            date_joined=timezone.now(),
+        )
+        session.add(user)
+        session.commit()
+        node = node_model(
+            uuid=get_new_uuid(),
+            user_id=user.id,
+            ctime=timezone.now(),
+            mtime=timezone.now(),
+            label='test',
+            description='',
+            nodeversion=1,
+            public=True,
+            node_type='node.process.calculation.CalculationNode.',
+        )
+        session.add(node)
+        session.commit()
+
+        computer_metadata = {
+            'shebang': '#!/bin/bash',
+            'workdir': '/scratch/',
+            'append_text': '',
+            'prepend_text': '',
+            'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'],
+            'default_mpiprocs_per_machine': 1
+        }
+        computer_transport_params = {'a': 1}
+        computer_kwargs = {
+            'uuid': get_new_uuid(),
+            'name': 'localhost_testing',
+            'description': '',
+            'hostname': 'localhost',
+            'transport_type': 'core.local',
+            'scheduler_type': 'core.direct',
+            'metadata': json.dumps(computer_metadata),
+            'transport_params': json.dumps(computer_transport_params),
+        }
+        computer = computer_model(**computer_kwargs)
+        session.add(computer)
+        session.commit()
+        computer_id = computer.id
+
+        auth_info_auth_params = {'safe_interval': 2}
+        auth_info_metadata = {'safe_interval': 2}
+        auth_info_kwargs = {
+            'aiidauser_id': user.id,
+            'dbcomputer_id': computer.id,
+            'enabled': True,
+            'auth_params': json.dumps(auth_info_auth_params),
+            'metadata': json.dumps(auth_info_metadata),
+        }
+        authinfo = authinfo_model(**auth_info_kwargs)
+        session.add(authinfo)
+        session.commit()
+        authinfo_id = authinfo.id
+
+        log_metadata = {
+            'msecs': 719.0849781036377,
+            'lineno': 350,
+            'thread': 140011612940032,
+            'asctime': '10/21/2018 12:39:51 PM',
+            'created': 1540118391.719085,
+            'levelno': 23,
+            'message': 'calculation node 1',
+        }
+        log_kwargs = {
+            'uuid': get_new_uuid(),
+            'time': timezone.now(),
+            'loggername': 'localhost',
+            'levelname': 'localhost',
+            'message': '',
+            'dbnode_id': node.id,
+            'metadata': json.dumps(log_metadata)
+        }
+        log = log_model(**log_kwargs)
+        session.add(log)
+        session.commit()
+        log_id = log.id
+
+    # final revision
+    perform_migrations.migrate_up('django@django_0033')
+
+    computer_model = perform_migrations.get_current_table('db_dbcomputer')
+    authinfo_model = perform_migrations.get_current_table('db_dbauthinfo')
+    log_model = perform_migrations.get_current_table('db_dblog')
+    with perform_migrations.session() as session:
+
+        computer = session.query(computer_model).filter(computer_model.id == computer_id).one()
+        assert computer.metadata == computer_metadata
+        assert computer.transport_params == computer_transport_params
+
+        authinfo = session.query(authinfo_model).filter(authinfo_model.id == authinfo_id).one()
+        assert authinfo.auth_params == auth_info_auth_params
+        assert authinfo.metadata == auth_info_metadata
+
+        log = session.query(log_model).filter(log_model.id == log_id).one()
+        assert log.metadata == log_metadata
diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0037_attributes_extras_settings_json.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0037_attributes_extras_settings_json.py
new file mode 100644
index 0000000000..f507951f11
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0037_attributes_extras_settings_json.py
@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+"""Tests for the migrations of the attributes, extras and settings from EAV to JSONB."""
+from sqlalchemy import select
+
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
+from aiida.common import timezone
+
+
+def test_attr_extra_migration(perform_migrations: PsqlDostoreMigrator):
+    """
+    A "simple" test for the attributes and extras migration from EAV to JSONB.
+    It stores a sample dictionary using the EAV serialization of AiiDA Django
+    for the attributes and extras. The test then checks that they are correctly
+    converted to JSONB.
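+
+    For illustration (a hypothetical row, not part of the data created below): an EAV
+    attribute stored as ``(key='x', datatype='int', ival=5)`` should come out of the
+    migration as the JSONB entry ``{'x': 5}`` in ``db_dbnode.attributes``.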
+ """ + # starting revision + perform_migrations.migrate_up('django@django_0036') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + attr_model = perform_migrations.get_current_table('db_dbattribute') + extra_model = perform_migrations.get_current_table('db_dbextra') + + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node = node_model( + uuid='00000000-0000-0000-0000-000000000000', + node_type='any', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + ) + session.add(node) + session.commit() + node_id = node.id + + for idx, kwargs in enumerate(({ + 'datatype': 'txt', + 'tval': 'test' + }, { + 'datatype': 'int', + 'ival': 1 + }, { + 'datatype': 'bool', + 'bval': True + }, { + 'datatype': 'float', + 'fval': 1.0 + }, { + 'datatype': 'date', + 'dval': timezone.isoformat_to_datetime('2022-01-01') + })): + kwargs['tval'] = 'test' + attr = attr_model(dbnode_id=node.id, key=f'attr_{idx}', **kwargs) + session.add(attr) + session.commit() + + extra = extra_model(dbnode_id=node.id, key=f'extra_{idx}', **kwargs) + session.add(extra) + session.commit() + + # final revision + perform_migrations.migrate_up('django@django_0037') + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + attrs = session.execute(select(node_model.attributes).where(node_model.id == node_id)).scalar_one() + extras = session.execute(select(node_model.extras).where(node_model.id == node_id)).scalar_one() + + attrs['attr_4'] = timezone.isoformat_to_datetime(attrs['attr_4']).year + extras['extra_4'] = timezone.isoformat_to_datetime(extras['extra_4']).year + assert attrs == {'attr_0': 'test', 'attr_1': 1, 'attr_2': True, 'attr_3': 1.0, 'attr_4': 2022} + assert extras == {'extra_0': 'test', 'extra_1': 1, 'extra_2': True, 'extra_3': 1.0, 'extra_4': 2022} + + +def test_settings_migration(perform_migrations: PsqlDostoreMigrator): + """ + This test checks the correct migration of the settings. + Setting records were used as an example from a typical settings table of Django EAV. 
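+
+    For illustration (hypothetical values): a setting row stored as
+    ``(key='y', datatype='float', fval=1.5)`` should end up in the new JSONB
+    ``val`` column as ``1.5`` after ``django@django_0037``.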
+ """ + # starting revision + perform_migrations.migrate_up('django@django_0036') + + # setup the database + setting_model = perform_migrations.get_current_table('db_dbsetting') + + with perform_migrations.session() as session: + + for idx, kwargs in enumerate(({ + 'datatype': 'txt', + 'tval': 'test' + }, { + 'datatype': 'int', + 'ival': 1 + }, { + 'datatype': 'bool', + 'bval': True + }, { + 'datatype': 'float', + 'fval': 1.0 + }, { + 'datatype': 'date', + 'dval': timezone.isoformat_to_datetime('2022-01-01') + })): + kwargs['tval'] = 'test' + kwargs['description'] = 'description' + kwargs['time'] = timezone.isoformat_to_datetime('2022-01-01') + attr = setting_model(key=f'key_{idx}', **kwargs) + session.add(attr) + session.commit() + + # final revision + perform_migrations.migrate_up('django@django_0037') + + setting_model = perform_migrations.get_current_table('db_dbsetting') + with perform_migrations.session() as session: + settings = { + row[0]: row[1] + for row in session.execute(select(setting_model.key, setting_model.val).order_by(setting_model.key)).all() + } + + settings['key_4'] = timezone.isoformat_to_datetime(settings['key_4']).year + assert settings == {'key_0': 'test', 'key_1': 1, 'key_2': True, 'key_3': 1.0, 'key_4': 2022} diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0038_data_migration_legacy_job_calculations.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0038_data_migration_legacy_job_calculations.py new file mode 100644 index 0000000000..56f73c6671 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0038_data_migration_legacy_job_calculations.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Tests the database migration from legacy calculations.""" +from __future__ import annotations + +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrations.utils.calc_state import STATE_MAPPING, StateMapping +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_legacy_jobcalcstate(perform_migrations: PsqlDostoreMigrator): + """Test the migration that performs a data migration of legacy `JobCalcState`.""" + # starting revision + perform_migrations.migrate_up('django@django_0037') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + nodes: dict[int, StateMapping] = {} + for state, mapping in STATE_MAPPING.items(): + node = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + attributes={'state': state}, + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + ) + session.add(node) + session.commit() + nodes[node.id] = mapping + + # final revision + perform_migrations.migrate_up('django@django_0038') + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + for node_id, mapping in nodes.items(): + attributes = session.get(node_model, node_id).attributes + assert attributes.get('process_state', None) == mapping.process_state + assert attributes.get('process_status', None) == mapping.process_status + assert attributes.get('exit_status', None) == mapping.exit_status + assert attributes.get('process_label', None) == 'Legacy JobCalculation' + assert attributes.get('state', None) is None + assert attributes.get('exit_message', None) is None or isinstance(attributes.get('exit_message'), int) diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0039_reset_hash.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0039_reset_hash.py new file mode 100644 index 0000000000..822a9eba49 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0039_reset_hash.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test the node hash reset.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_reset_hash(perform_migrations: PsqlDostoreMigrator): + """Test the node hash reset.""" + # starting revision + perform_migrations.migrate_up('django@django_0038') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='data.', + extras={ + 'something': 123, + '_aiida_hash': 'abcd' + } + ) + session.add(node) + session.commit() + node_id = node.id + + # final revision + perform_migrations.migrate_up('django@django_0039') + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + node = session.get(node_model, node_id) + # The hash extra should have been removed + assert node.extras == {'something': 123} diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0040_data_migration_legacy_process_attributes.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0040_data_migration_legacy_process_attributes.py new file mode 100644 index 0000000000..0d9609e2f4 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0040_data_migration_legacy_process_attributes.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Tests the database migration of legacy process attributes.""" +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_legacy_jobcalc_attrs(perform_migrations: PsqlDostoreMigrator): + """Test the migration that performs a data migration of legacy `JobCalcState`.""" + # starting revision + perform_migrations.migrate_up('django@django_0039') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node_process = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={ + 'process_state': 'finished', + '_sealed': True, + '_finished': True, + '_failed': False, + '_aborted': False, + '_do_abort': False, + }, + ) + node_process_active = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={ + 'process_state': 'created', + '_finished': True, + '_failed': False, + '_aborted': False, + '_do_abort': False, + }, + ) + node_data = node_model( + uuid=str(uuid4()), + node_type='data.core.dict.Dict.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={ + '_sealed': True, + '_finished': True, + '_failed': False, + '_aborted': False, + '_do_abort': False, + }, + ) + session.add(node_process) + session.add(node_process_active) + session.add(node_data) + session.commit() + + node_process_id = node_process.id + node_process_active_id = node_process_active.id + node_data_id = node_data.id + + # final revision + perform_migrations.migrate_up('django@django_0040') + + deleted_keys = ['_sealed', '_finished', '_failed', '_aborted', '_do_abort'] + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + + node_process = session.get(node_model, node_process_id) + assert node_process.attributes['sealed'] is True + for key in deleted_keys: + assert key not in node_process.attributes + + node_process_active = session.get(node_model, node_process_active_id) + assert 'sealed' not in node_process_active.attributes + for key in deleted_keys: + assert key not in node_process_active.attributes + + node_data = session.get(node_model, node_data_id) + assert node_data.attributes.get('sealed') is None + for key in deleted_keys: + assert key in node_data.attributes diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0041_seal_unsealed_processes.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0041_seal_unsealed_processes.py new file mode 100644 index 0000000000..642f2129ce --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0041_seal_unsealed_processes.py @@ -0,0 +1,107 @@ +# 
-*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test sealing of unsealed processes.""" +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_legacy_jobcalc_attrs(perform_migrations: PsqlDostoreMigrator): + """Test sealing of unsealed processes.""" + # starting revision + perform_migrations.migrate_up('django@django_0040') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node_process = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={ + 'process_state': 'finished', + 'sealed': True, + }, + ) + node_process_active = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={ + 'process_state': 'created', + }, + ) + node_process_legacy = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcFunctionNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={}, + ) + node_data = node_model( + uuid=str(uuid4()), + node_type='data.core.dict.Dict.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + attributes={}, + ) + session.add(node_process) + session.add(node_process_active) + session.add(node_process_legacy) + session.add(node_data) + session.commit() + + node_process_id = node_process.id + node_process_active_id = node_process_active.id + node_process_legacy_id = node_process_legacy.id + node_data_id = node_data.id + + # final revision + perform_migrations.migrate_up('django@django_0041') + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + + node_process = session.get(node_model, node_process_id) + assert node_process.attributes['sealed'] is True + + node_process_active = session.get(node_model, node_process_active_id) + assert 'sealed' not in node_process_active.attributes + + node_process_legacy = session.get(node_model, node_process_legacy_id) + assert node_process_legacy.attributes['sealed'] is True + + node_data = session.get(node_model, node_data_id) + assert 'sealed' not in node_data.attributes diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0043_default_link_label.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0043_default_link_label.py new file mode 100644 index 0000000000..cc11314466 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0043_default_link_label.py @@ -0,0 +1,74 @@ +# 
-*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test update of link labels.""" +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_default_link_label(perform_migrations: PsqlDostoreMigrator): + """Test update of link labels.""" + # starting revision + perform_migrations.migrate_up('django@django_0042') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + link_model = perform_migrations.get_current_table('db_dblink') + + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node_process = node_model( + uuid=str(uuid4()), + node_type='process.calculation.calcjob.CalcJobNode.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + ) + node_data = node_model( + uuid=str(uuid4()), + node_type='data.core.dict.Dict.', + label='test', + description='', + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + ) + session.add(node_process) + session.add(node_data) + session.commit() + + link = link_model( + input_id=node_data.id, + output_id=node_process.id, + type='input', + label='_return', + ) + session.add(link) + session.commit() + link_id = link.id + + # final revision + perform_migrations.migrate_up('django@django_0043') + + link_model = perform_migrations.get_current_table('db_dblink') + with perform_migrations.session() as session: + link = session.get(link_model, link_id) + assert link.label == 'result' diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0044_dbgroup_type_string.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0044_dbgroup_type_string.py new file mode 100644 index 0000000000..5b12bf8a32 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0044_dbgroup_type_string.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration of `type_string` after the `Group` class became pluggable.""" +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_group_type_string(perform_migrations: PsqlDostoreMigrator): + """Test migration of `type_string` after the `Group` class became pluggable.""" + # starting revision + perform_migrations.migrate_up('django@django_0043') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + group_model = perform_migrations.get_current_table('db_dbgroup') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + kwargs = { + 'user_id': user.id, + 'time': timezone.now(), + 'label': 'test', + 'description': '', + } + group_user = group_model(uuid=str(uuid4()), type_string='user', **kwargs) + session.add(group_user) + group_data_upf = group_model(uuid=str(uuid4()), type_string='data.upf', **kwargs) + session.add(group_data_upf) + group_autoimport = group_model(uuid=str(uuid4()), type_string='auto.import', **kwargs) + session.add(group_autoimport) + group_autorun = group_model(uuid=str(uuid4()), type_string='auto.run', **kwargs) + session.add(group_autorun) + + session.commit() + + group_user_id = group_user.id + group_data_upf_id = group_data_upf.id + group_autoimport_id = group_autoimport.id + group_autorun_id = group_autorun.id + + # final revision + perform_migrations.migrate_up('django@django_0044') + + group_model = perform_migrations.get_current_table('db_dbgroup') + with perform_migrations.session() as session: + + # 'user' -> 'core' + group_user = session.get(group_model, group_user_id) + assert group_user.type_string == 'core' + + # 'data.upf' -> 'core.upf' + group_data_upf = session.get(group_model, group_data_upf_id) + assert group_data_upf.type_string == 'core.upf' + + # 'auto.import' -> 'core.import' + group_autoimport = session.get(group_model, group_autoimport_id) + assert group_autoimport.type_string == 'core.import' + + # 'auto.run' -> 'core.auto' + group_autorun = session.get(group_model, group_autorun_id) + assert group_autorun.type_string == 'core.auto' diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0045_dbgroup_extras.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0045_dbgroup_extras.py new file mode 100644 index 0000000000..6d4c953e3c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0045_dbgroup_extras.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration to add the `extras` JSONB column to the `DbGroup` model.""" +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + + +def test_group_extras(perform_migrations: PsqlDostoreMigrator): + """Test migration to add the `extras` JSONB column to the `DbGroup` model.""" + # starting revision + perform_migrations.migrate_up('django@django_0044') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + group_model = perform_migrations.get_current_table('db_dbgroup') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + kwargs = { + 'user_id': user.id, + 'time': timezone.now(), + 'label': 'test', + 'description': '', + } + group_user = group_model(uuid=str(uuid4()), type_string='core', **kwargs) + session.add(group_user) + session.commit() + group_user_id = group_user.id + + # final revision + perform_migrations.migrate_up('django@django_0045') + + group_model = perform_migrations.get_current_table('db_dbgroup') + with perform_migrations.session() as session: + group_user = session.get(group_model, group_user_id) + assert group_user.extras == {} diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0046_add_node_repository_metadata.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0046_add_node_repository_metadata.py new file mode 100644 index 0000000000..b04b6245fa --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0046_add_node_repository_metadata.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration adding the `repository_metadata` column to the `Node` model.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_node_repository(perform_migrations: PsqlDostoreMigrator): + """Test migration adding the `repository_metadata` column to the `Node` model.""" + # starting revision + perform_migrations.migrate_up('django@django_0045') + + # setup the database + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + node = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='data.', + ) + session.add(node) + session.commit() + node_id = node.id + + # final revision + perform_migrations.migrate_up('django@django_0046') + + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + node = session.get(node_model, node_id) + assert node.repository_metadata == {} diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0047_migrate_repository.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0047_migrate_repository.py new file mode 100644 index 0000000000..da992d11a3 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0047_migrate_repository.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration of the old file repository to the disk object store.""" +import hashlib +import os +from uuid import uuid4 + +from aiida.backends.sqlalchemy.migrations.utils import utils +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone + +REPOSITORY_UUID_KEY = 'repository|uuid' + + +def test_migrate_repository(perform_migrations: PsqlDostoreMigrator): # pylint: disable=too-many-locals + """Test migration of the old file repository to the disk object store.""" + # starting revision + perform_migrations.migrate_up('django@django_0046') + + repo_path = perform_migrations.profile.repository_path + + # setup the storage + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + session.add(user) + session.commit() + kwargs = dict( + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='data.', + repository_metadata={}, + ) + dbnode_01 = node_model(uuid=str(uuid4()), **kwargs) + dbnode_02 = node_model(uuid=str(uuid4()), **kwargs) + dbnode_03 = node_model(uuid=str(uuid4()), **kwargs) + dbnode_04 = node_model(uuid=str(uuid4()), **kwargs) + + session.add_all((dbnode_01, dbnode_02, dbnode_03, dbnode_04)) + session.commit() + + dbnode_01_id = dbnode_01.id + dbnode_02_id = dbnode_02.id + dbnode_03_id = dbnode_03.id + dbnode_04_id = dbnode_04.id + + utils.put_object_from_string(repo_path, dbnode_01.uuid, 'sub/path/file_b.txt', 'b') + utils.put_object_from_string(repo_path, dbnode_01.uuid, 'sub/file_a.txt', 'a') + utils.put_object_from_string(repo_path, dbnode_02.uuid, 'output.txt', 'output') + utils.put_object_from_string(repo_path, dbnode_04.uuid, '.gitignore', 'test') + + # If both `path` and `raw_input` subfolders are present and `.gitignore` is in `path`, it should be ignored. + # Cannot use `put_object_from_string` here as it statically writes under the `path` folder. 
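+ # A sketch of the legacy v0.x on-disk layout assumed by this setup (shard folders derived
+ # from the node UUID, with file trees under `path` and, for calculation inputs, `raw_input`):
+ #
+ #   <repo_path>/repository/node/<uuid[:2]>/<uuid[2:4]>/<uuid[4:]>/raw_input/input.txt
+ #
+ # `get_node_repository_sub_folder` resolves everything up to the subfolder, so only the
+ # file itself is written by hand below.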
+ os.makedirs(utils.get_node_repository_sub_folder(repo_path, dbnode_04.uuid, 'raw_input'), exist_ok=True) + with open( + os.path.join(utils.get_node_repository_sub_folder(repo_path, dbnode_04.uuid, 'raw_input'), 'input.txt'), + 'w', + encoding='utf-8', + ) as handle: + handle.write('input') + + # migrate up + perform_migrations.migrate_up('django@django_0047') + + node_model = perform_migrations.get_current_table('db_dbnode') + setting_model = perform_migrations.get_current_table('db_dbsetting') + with perform_migrations.session() as session: + + # check that the repository uuid is set + repository_uuid = session.query(setting_model).filter(setting_model.key == REPOSITORY_UUID_KEY).one() + assert repository_uuid.val is not None + assert repository_uuid.val != '' + assert isinstance(repository_uuid.val, str) + + node_01 = session.get(node_model, dbnode_01_id) + node_02 = session.get(node_model, dbnode_02_id) + node_03 = session.get(node_model, dbnode_03_id) + node_04 = session.get(node_model, dbnode_04_id) + + assert node_01.repository_metadata == { + 'o': { + 'sub': { + 'o': { + 'path': { + 'o': { + 'file_b.txt': { + 'k': hashlib.sha256('b'.encode('utf-8')).hexdigest() + } + } + }, + 'file_a.txt': { + 'k': hashlib.sha256('a'.encode('utf-8')).hexdigest() + } + } + } + } + } + assert node_02.repository_metadata == { + 'o': { + 'output.txt': { + 'k': hashlib.sha256('output'.encode('utf-8')).hexdigest() + } + } + } + assert node_03.repository_metadata == {} + assert node_04.repository_metadata == { + 'o': { + 'input.txt': { + 'k': hashlib.sha256('input'.encode('utf-8')).hexdigest() + } + } + } + + for hashkey, content in ( + (node_01.repository_metadata['o']['sub']['o']['path']['o']['file_b.txt']['k'], b'b'), + (node_01.repository_metadata['o']['sub']['o']['file_a.txt']['k'], b'a'), + (node_02.repository_metadata['o']['output.txt']['k'], b'output'), + (node_04.repository_metadata['o']['input.txt']['k'], b'input'), + ): + assert utils.get_repository_object(perform_migrations.profile, hashkey) == content diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0048_computer_name_to_label.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0048_computer_name_to_label.py new file mode 100644 index 0000000000..73d333bc85 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0048_computer_name_to_label.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test the renaming of `name` to `label` for `db_dbcomputer`.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common.utils import get_new_uuid + + +def test_computer_name_to_label(perform_migrations: PsqlDostoreMigrator): + """Test the renaming of `name` to `label` for `db_dbcomputer`. + + Verify that the column was successfully renamed. 
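+ A computer is created while the old `name` column is still in place, then read back through
+ the new `label` column after the migration has run.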
+ """ + # starting revision + perform_migrations.migrate_up('django@django_0047') + + # setup the database + comp_model = perform_migrations.get_current_table('db_dbcomputer') + with perform_migrations.session() as session: + computer = comp_model( + name='testing', + uuid=get_new_uuid(), + hostname='localhost', + description='', + transport_type='', + scheduler_type='', + metadata={}, + ) + session.add(computer) + session.commit() + computer_id = computer.id + + # migrate up + perform_migrations.migrate_up('django@django_0048') + + # perform some checks + comp_model = perform_migrations.get_current_table('db_dbcomputer') + with perform_migrations.session() as session: + computer = session.query(comp_model).filter(comp_model.id == computer_id).one() + assert computer.label == 'testing' diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0049_entry_point_core_prefix.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0049_entry_point_core_prefix.py new file mode 100644 index 0000000000..da92af4c10 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0049_entry_point_core_prefix.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration that updates node types after `core.` prefix was added to entry point names.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_entry_point_core_prefix(perform_migrations: PsqlDostoreMigrator): + """Test the renaming of `name` to `label` for `db_dbcomputer`. + + Verify that the column was successfully renamed. 
+ """ + # starting revision + perform_migrations.migrate_up('django@django_0048') + + # setup the database + comp_model = perform_migrations.get_current_table('db_dbcomputer') + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + computer = comp_model( + uuid=get_new_uuid(), + label='testing', + hostname='localhost', + description='', + transport_type='local', + scheduler_type='direct', + metadata={}, + ) + session.add_all((user, computer)) + session.commit() + computer_id = computer.id + + calcjob = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='process.calcjob.', + process_type='aiida.calculations:core.arithmetic.add', + attributes={'parser_name': 'core.arithmetic.add'}, + repository_metadata={}, + ) + workflow = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='process.workflow.', + process_type='aiida.workflows:arithmetic.add_multiply', + repository_metadata={}, + ) + + session.add_all((calcjob, workflow)) + session.commit() + + calcjob_id = calcjob.id + workflow_id = workflow.id + + # migrate up + perform_migrations.migrate_up('django@django_0049') + + # perform some checks + comp_model = perform_migrations.get_current_table('db_dbcomputer') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + + computer = session.query(comp_model).filter(comp_model.id == computer_id).one() + assert computer.scheduler_type == 'core.direct' + assert computer.transport_type == 'core.local' + + calcjob = session.query(node_model).filter(node_model.id == calcjob_id).one() + assert calcjob.process_type == 'aiida.calculations:core.arithmetic.add' + assert calcjob.attributes['parser_name'] == 'core.arithmetic.add' + + workflow = session.query(node_model).filter(node_model.id == workflow_id).one() + assert workflow.process_type == 'aiida.workflows:core.arithmetic.add_multiply' diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0050_schema_parity.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0050_schema_parity.py new file mode 100644 index 0000000000..e563b0f8e7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_0050_schema_parity.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migration that renames all index/constraint names to have parity between django/sqlalchemy.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator +from aiida.common import timezone +from aiida.common.utils import get_new_uuid + + +def test_schema_parity(perform_migrations: PsqlDostoreMigrator): + """Test that the renaming of indexes and constraints works when data is in the database.""" + # starting revision + perform_migrations.migrate_up('django@django_0049') + + # setup the database + comp_model = perform_migrations.get_current_table('db_dbcomputer') + user_model = perform_migrations.get_current_table('db_dbuser') + node_model = perform_migrations.get_current_table('db_dbnode') + with perform_migrations.session() as session: + user = user_model( + email='user@aiida.net', + first_name='John', + last_name='Doe', + institution='EPFL', + ) + computer = comp_model( + uuid=get_new_uuid(), + label='testing', + hostname='localhost', + description='', + transport_type='core.local', + scheduler_type='core.direct', + metadata={}, + ) + session.add_all((user, computer)) + session.commit() + + calcjob = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='process.calcjob.', + process_type='aiida.calculations:core.arithmetic.add', + attributes={'parser_name': 'core.arithmetic.add'}, + repository_metadata={}, + ) + workflow = node_model( + uuid=get_new_uuid(), + user_id=user.id, + ctime=timezone.now(), + mtime=timezone.now(), + label='test', + description='', + node_type='process.workflow.', + process_type='aiida.workflows:core.arithmetic.add_multiply', + repository_metadata={}, + ) + + session.add_all((calcjob, workflow)) + session.commit() + + # migrate up + perform_migrations.migrate_up('django@django_0050') diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy.py new file mode 100644 index 0000000000..782d8b174a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy.py @@ -0,0 +1,626 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +"""Test migrations of legacy django databases. + +In django, it is not possible to explicitly specify constraints/indexes and their names; +instead they are implicitly created by internal "auto-generation" code +(as opposed to sqlalchemy, where one can explicitly specify the names). +For a specific django version, this auto-generation code is deterministic; +however, over time it has changed. +So it is not possible to know declaratively exactly what constraints/indexes are present on a user's database, +without knowing the exact django version that created it (and ran its migrations). 
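+For example, the reverse-engineered schema below contains auto-generated, hash-suffixed names such as
+`db_dbattribute_dbnode_id_10206dc8cec3d0be_uniq`, which other django versions would have generated
+with different digests.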
+Therefore, we need to check that the migration code handles this correctly. +""" +import sqlalchemy as sa + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator + + +def test_v0x_django_0003(perform_migrations: PsqlDostoreMigrator, reflect_schema, data_regression): # pylint: disable=too-many-locals + """Test against an archive database schema, created in aiida-core v0.x, at revision django_0003.""" + metadata = generate_schema() + with perform_migrations._connection_context() as conn: # pylint: disable=protected-access + metadata.create_all(conn.engine) + with perform_migrations._migration_context(conn) as context: # pylint: disable=protected-access + context.stamp(context.script, 'django@django_0003') + conn.commit() + + perform_migrations.migrate_up('django@django_0050') + data_regression.check(reflect_schema(perform_migrations.profile)) + + +def generate_schema() -> sa.MetaData: + """This database schema was reverse-engineered from an archive database, + created in aiida-core v0.x, at revision django_0003. + """ + metadata = sa.MetaData() + sa.Table('auth_group', metadata, sa.Column('id', sa.Integer, primary_key=True)) + sa.Table('auth_group_permissions', metadata, sa.Column('id', sa.Integer, primary_key=True)) + sa.Table('auth_permission', metadata, sa.Column('id', sa.Integer, primary_key=True)) + sa.Table('django_content_type', metadata, sa.Column('id', sa.Integer, primary_key=True)) + sa.Table('django_migrations', metadata, sa.Column('id', sa.Integer, primary_key=True)) + sa.Table( + 'db_dbattribute', + metadata, + sa.Column('bval', sa.Boolean(), nullable=True), + sa.Column('datatype', sa.String(length=10), nullable=False), + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('dval', sa.DateTime(timezone=True), nullable=True), + sa.Column('fval', sa.Float(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('ival', sa.Integer(), nullable=True), + sa.Column('key', sa.String(length=1024), nullable=False), + sa.Column('tval', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbattribute_pkey'), + sa.UniqueConstraint('dbnode_id', 'key', name='db_dbattribute_dbnode_id_10206dc8cec3d0be_uniq'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbattribute_dbnode_id_783fe2b9b1ee948f_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbattribute_3931108d', 'datatype'), + sa.Index('db_dbattribute_3c6e0b8a', 'key'), + sa.Index('db_dbattribute_7a672316', 'dbnode_id'), + sa.Index( + 'db_dbattribute_datatype_7e609aede7da800c_like', + 'datatype', + postgresql_ops={'datatype': 'varchar_pattern_ops'} + ), + sa.Index('db_dbattribute_key_6936ff5c4f96a1be_like', 'key', postgresql_ops={'key': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbauthinfo', + metadata, + sa.Column('aiidauser_id', sa.Integer(), nullable=False), + sa.Column('auth_params', sa.Text(), nullable=False), + sa.Column('dbcomputer_id', sa.Integer(), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('metadata', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbauthinfo_pkey'), + sa.UniqueConstraint('aiidauser_id', 'dbcomputer_id', name='db_dbauthinfo_aiidauser_id_5b91ddd9ac6ddd83_uniq'), + sa.ForeignKeyConstraint( + ['aiidauser_id'], + ['db_dbuser.id'], + name='db_dbauthinfo_aiidauser_id_b4dbd2ecdabaa58_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + 
['dbcomputer_id'], + ['db_dbcomputer.id'], + name='db_dbauthinfo_dbcomputer_id_be3c9b99107479b_fk_db_dbcomputer_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbauthinfo_669c815a', 'aiidauser_id'), + sa.Index('db_dbauthinfo_9ed6a91c', 'dbcomputer_id'), + ) + sa.Table( + 'db_dbcalcstate', + metadata, + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('state', sa.String(length=25), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcalcstate_pkey'), + sa.UniqueConstraint('dbnode_id', 'state', name='db_dbcalcstate_dbnode_id_45de92d4e5e6b644_uniq'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbcalcstate_dbnode_id_5ab286e6811907a3_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbcalcstate_7a672316', 'dbnode_id'), + sa.Index('db_dbcalcstate_9ed39e2e', 'state'), + sa.Index( + 'db_dbcalcstate_state_7b15f131504dbe38_like', 'state', postgresql_ops={'state': 'varchar_pattern_ops'} + ), + ) + sa.Table( + 'db_dbcomment', + metadata, + sa.Column('content', sa.Text(), nullable=False), + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcomment_pkey'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbcomment_dbnode_id_e225ac462eb8f6c_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbcomment_user_id_2e215134d026c3a3_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbcomment_7a672316', 'dbnode_id'), + sa.Index('db_dbcomment_e8701ad4', 'user_id'), + ) + sa.Table( + 'db_dbcomputer', + metadata, + sa.Column('description', sa.Text(), nullable=False), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column('hostname', sa.String(length=255), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('metadata', sa.Text(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('scheduler_type', sa.String(length=255), nullable=False), + sa.Column('transport_params', sa.Text(), nullable=False), + sa.Column('transport_type', sa.String(length=255), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbcomputer_pkey'), + sa.UniqueConstraint('name', name='db_dbcomputer_name_key'), + sa.Index('db_dbcomputer_name_538c8da7bbe500af_like', 'name', postgresql_ops={'name': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbextra', + metadata, + sa.Column('bval', sa.Boolean(), nullable=True), + sa.Column('datatype', sa.String(length=10), nullable=False), + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('dval', sa.DateTime(timezone=True), nullable=True), + sa.Column('fval', sa.Float(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('ival', sa.Integer(), nullable=True), + sa.Column('key', sa.String(length=1024), nullable=False), + sa.Column('tval', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbextra_pkey'), + 
sa.UniqueConstraint('dbnode_id', 'key', name='db_dbextra_dbnode_id_2a99ce873931fdd4_uniq'), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbextra_dbnode_id_c556b194c79dec1_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbextra_3931108d', 'datatype'), + sa.Index('db_dbextra_3c6e0b8a', 'key'), + sa.Index('db_dbextra_7a672316', 'dbnode_id'), + sa.Index( + 'db_dbextra_datatype_12730358b2c29a0a_like', 'datatype', postgresql_ops={'datatype': 'varchar_pattern_ops'} + ), + sa.Index('db_dbextra_key_67f77eb2ec05ed40_like', 'key', postgresql_ops={'key': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbgroup', + metadata, + sa.Column('description', sa.Text(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbgroup_pkey'), + sa.UniqueConstraint('name', 'type', name='db_dbgroup_name_680159c7377fefd_uniq'), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbgroup_user_id_698e239e754dccc5_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbgroup_599dcce2', 'type'), + sa.Index('db_dbgroup_b068931c', 'name'), + sa.Index('db_dbgroup_e8701ad4', 'user_id'), + sa.Index('db_dbgroup_name_30351f1c64285f22_like', 'name', postgresql_ops={'name': 'varchar_pattern_ops'}), + sa.Index('db_dbgroup_type_49745d6ede76abdd_like', 'type', postgresql_ops={'type': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbgroup_dbnodes', + metadata, + sa.Column('dbgroup_id', sa.Integer(), nullable=False), + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbgroup_dbnodes_pkey'), + sa.UniqueConstraint('dbgroup_id', 'dbnode_id', name='db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key'), + sa.ForeignKeyConstraint( + ['dbgroup_id'], + ['db_dbgroup.id'], + name='db_dbgroup_dbnodes_dbgroup_id_32d69f1acbc4c03c_fk_db_dbgroup_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbgroup_dbnodes_dbnode_id_53a1829a1973b99c_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbgroup_dbnodes_7a672316', 'dbnode_id'), + sa.Index('db_dbgroup_dbnodes_a0b4eda0', 'dbgroup_id'), + ) + sa.Table( + 'db_dblink', + metadata, + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('input_id', sa.Integer(), nullable=False), + sa.Column('label', sa.String(length=255), nullable=False), + sa.Column('output_id', sa.Integer(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dblink_pkey'), + sa.ForeignKeyConstraint( + ['input_id'], + ['db_dbnode.id'], + name='db_dblink_input_id_6feafb02380ed56f_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['output_id'], + ['db_dbnode.id'], + name='db_dblink_output_id_6345a663e713ed93_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dblink_599dcce2', 'type'), + sa.Index('db_dblink_b082bddd', 'input_id'), + sa.Index('db_dblink_d304ba20', 'label'), + sa.Index('db_dblink_f7f1d83a', 'output_id'), + 
sa.Index('db_dblink_label_8f8811d475657bc_like', 'label', postgresql_ops={'label': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dblock', + metadata, + sa.Column('creation', sa.DateTime(timezone=True), nullable=False), + sa.Column('key', sa.String(length=255), nullable=False), + sa.Column('owner', sa.String(length=255), nullable=False), + sa.Column('timeout', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('key', name='db_dblock_pkey'), + sa.Index('db_dblock_key_47b06099dbb553de_like', 'key', postgresql_ops={'key': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dblog', + metadata, + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('levelname', sa.String(length=50), nullable=False), + sa.Column('loggername', sa.String(length=255), nullable=False), + sa.Column('message', sa.Text(), nullable=False), + sa.Column('metadata', sa.Text(), nullable=False), + sa.Column('objname', sa.String(length=255), nullable=False), + sa.Column('objpk', sa.Integer(), nullable=True), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dblog_pkey'), + sa.Index('db_dblog_269f51f9', 'levelname'), + sa.Index('db_dblog_358be7bf', 'loggername'), + sa.Index('db_dblog_850eed5f', 'objpk'), + sa.Index('db_dblog_e3898037', 'objname'), + sa.Index( + 'db_dblog_levelname_14b334f2645c4b06_like', + 'levelname', + postgresql_ops={'levelname': 'varchar_pattern_ops'} + ), + sa.Index( + 'db_dblog_loggername_4f4ecb812e82233_like', + 'loggername', + postgresql_ops={'loggername': 'varchar_pattern_ops'} + ), + sa.Index( + 'db_dblog_objname_704cbe43c1c08fe5_like', 'objname', postgresql_ops={'objname': 'varchar_pattern_ops'} + ), + ) + sa.Table( + 'db_dbnode', + metadata, + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), + sa.Column('dbcomputer_id', sa.Integer(), nullable=True), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('label', sa.String(length=255), nullable=False), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), + sa.Column('nodeversion', sa.Integer(), nullable=False), + sa.Column('public', sa.Boolean(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbnode_pkey'), + sa.ForeignKeyConstraint( + ['dbcomputer_id'], + ['db_dbcomputer.id'], + name='db_dbnode_dbcomputer_id_2195c2d4d9b222ff_fk_db_dbcomputer_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbnode_user_id_43fd81cadf67f183_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbnode_599dcce2', 'type'), + sa.Index('db_dbnode_9ed6a91c', 'dbcomputer_id'), + sa.Index('db_dbnode_d304ba20', 'label'), + sa.Index('db_dbnode_e8701ad4', 'user_id'), + sa.Index('db_dbnode_label_6242931c5b984b78_like', 'label', postgresql_ops={'label': 'varchar_pattern_ops'}), + sa.Index('db_dbnode_type_4cda33f938ccd765_like', 'type', postgresql_ops={'type': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbpath', + metadata, + sa.Column('child_id', sa.Integer(), nullable=False), + sa.Column('depth', sa.Integer(), nullable=False), + sa.Column('direct_edge_id', sa.Integer(), nullable=True), + sa.Column('entry_edge_id', sa.Integer(), nullable=True), + sa.Column('exit_edge_id', sa.Integer(), nullable=True), + sa.Column('id', sa.Integer(), 
nullable=False), + sa.Column('parent_id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbpath_pkey'), + sa.ForeignKeyConstraint( + ['child_id'], + ['db_dbnode.id'], + name='db_dbpath_child_id_29b8c02ce4515a02_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbnode.id'], + name='db_dbpath_parent_id_56b6292fab1ae2a1_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbpath_6be37982', 'parent_id'), + sa.Index('db_dbpath_f36263a3', 'child_id'), + ) + sa.Table( + 'db_dbsetting', + metadata, + sa.Column('bval', sa.Boolean(), nullable=True), + sa.Column('datatype', sa.String(length=10), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('dval', sa.DateTime(timezone=True), nullable=True), + sa.Column('fval', sa.Float(), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('ival', sa.Integer(), nullable=True), + sa.Column('key', sa.String(length=1024), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('tval', sa.Text(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbsetting_pkey'), + sa.UniqueConstraint('key', name='db_dbsetting_key_4cac773d062e1744_uniq'), + sa.Index('db_dbsetting_3931108d', 'datatype'), + sa.Index('db_dbsetting_3c6e0b8a', 'key'), + sa.Index( + 'db_dbsetting_datatype_50c0180f460a7006_like', + 'datatype', + postgresql_ops={'datatype': 'varchar_pattern_ops'} + ), + sa.Index('db_dbsetting_key_4cac773d062e1744_like', 'key', postgresql_ops={'key': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbuser', + metadata, + sa.Column('date_joined', sa.DateTime(timezone=True), nullable=False), + sa.Column('email', sa.String(length=75), nullable=False), + sa.Column('first_name', sa.String(length=254), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('institution', sa.String(length=254), nullable=False), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('is_staff', sa.Boolean(), nullable=False), + sa.Column('is_superuser', sa.Boolean(), nullable=False), + sa.Column('last_login', sa.DateTime(timezone=True), nullable=False), + sa.Column('last_name', sa.String(length=254), nullable=False), + sa.Column('password', sa.String(length=128), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_pkey'), + sa.UniqueConstraint('email', name='db_dbuser_email_key'), + sa.Index('db_dbuser_email_e02af7a860b2501_like', 'email', postgresql_ops={'email': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbuser_groups', + metadata, + sa.Column('dbuser_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_groups_pkey'), + sa.UniqueConstraint('dbuser_id', 'group_id', name='db_dbuser_groups_dbuser_id_group_id_key'), + sa.ForeignKeyConstraint( + ['dbuser_id'], + ['db_dbuser.id'], + name='db_dbuser_groups_dbuser_id_6024db9daf8ecba_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['group_id'], + ['auth_group.id'], + name='db_dbuser_groups_group_id_78e325354186e2b_fk_auth_group_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbuser_groups_0e939a4f', 'group_id'), + sa.Index('db_dbuser_groups_b2c441d1', 'dbuser_id'), + ) + sa.Table( + 'db_dbuser_user_permissions', + metadata, + sa.Column('dbuser_id', sa.Integer(), nullable=False), + 
sa.Column('id', sa.Integer(), nullable=False), + sa.Column('permission_id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbuser_user_permissions_pkey'), + sa.UniqueConstraint( + 'dbuser_id', 'permission_id', name='db_dbuser_user_permissions_dbuser_id_permission_id_key' + ), + sa.ForeignKeyConstraint( + ['permission_id'], + ['auth_permission.id'], + name='db_dbuser__permission_id_77342b1287a009fe_fk_auth_permission_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['dbuser_id'], + ['db_dbuser.id'], + name='db_dbuser_user_permi_dbuser_id_325dd28d66e30790_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbuser_user_permissions_8373b171', 'permission_id'), + sa.Index('db_dbuser_user_permissions_b2c441d1', 'dbuser_id'), + ) + sa.Table( + 'db_dbworkflow', + metadata, + sa.Column('ctime', sa.DateTime(timezone=True), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('label', sa.String(length=255), nullable=False), + sa.Column('lastsyncedversion', sa.Integer(), nullable=False), + sa.Column('module', sa.Text(), nullable=False), + sa.Column('module_class', sa.Text(), nullable=False), + sa.Column('mtime', sa.DateTime(timezone=True), nullable=False), + sa.Column('nodeversion', sa.Integer(), nullable=False), + sa.Column('report', sa.Text(), nullable=False), + sa.Column('script_md5', sa.String(length=255), nullable=False), + sa.Column('script_path', sa.Text(), nullable=False), + sa.Column('state', sa.String(length=255), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflow_pkey'), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbworkflow_user_id_745f0415fc9f135a_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbworkflow_d304ba20', 'label'), + sa.Index('db_dbworkflow_e8701ad4', 'user_id'), + sa.Index('db_dbworkflow_label_55e5f0a232defa37_like', 'label', postgresql_ops={'label': 'varchar_pattern_ops'}), + ) + sa.Table( + 'db_dbworkflowdata', + metadata, + sa.Column('aiida_obj_id', sa.Integer(), nullable=True), + sa.Column('data_type', sa.String(length=255), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('json_value', sa.Text(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('parent_id', sa.Integer(), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('value_type', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowdata_pkey'), + sa.UniqueConstraint('data_type', 'name', 'parent_id', name='db_dbworkflowdata_parent_id_1f60f874e728c5f0_uniq'), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbworkflow.id'], + name='db_dbworkflowdat_parent_id_74e8079e6f1c8441_fk_db_dbworkflow_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['aiida_obj_id'], + ['db_dbnode.id'], + name='db_dbworkflowdata_aiida_obj_id_28130672924934ca_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbworkflowdata_668c0731', 'aiida_obj_id'), + sa.Index('db_dbworkflowdata_6be37982', 'parent_id'), + ) + sa.Table( + 'db_dbworkflowstep', + metadata, + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + 
sa.Column('nextcall', sa.String(length=255), nullable=False), + sa.Column('parent_id', sa.Integer(), nullable=False), + sa.Column('state', sa.String(length=255), nullable=False), + sa.Column('time', sa.DateTime(timezone=True), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_pkey'), + sa.UniqueConstraint('name', 'parent_id', name='db_dbworkflowstep_parent_id_57c505d36f0f2dd3_uniq'), + sa.ForeignKeyConstraint( + ['parent_id'], + ['db_dbworkflow.id'], + name='db_dbworkflowste_parent_id_33a89b7df301ebbd_fk_db_dbworkflow_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['user_id'], + ['db_dbuser.id'], + name='db_dbworkflowstep_user_id_32681ba845c275dc_fk_db_dbuser_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbworkflowstep_6be37982', 'parent_id'), + sa.Index('db_dbworkflowstep_e8701ad4', 'user_id'), + ) + sa.Table( + 'db_dbworkflowstep_calculations', + metadata, + sa.Column('dbnode_id', sa.Integer(), nullable=False), + sa.Column('dbworkflowstep_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_calculations_pkey'), + sa.UniqueConstraint( + 'dbnode_id', 'dbworkflowstep_id', name='db_dbworkflowstep_calculations_dbworkflowstep_id_dbnode_id_key' + ), + sa.ForeignKeyConstraint( + ['dbworkflowstep_id'], + ['db_dbworkflowstep.id'], + name='db_d_dbworkflowstep_id_1f84ab0dccc60762_fk_db_dbworkflowstep_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['dbnode_id'], + ['db_dbnode.id'], + name='db_dbworkflowstep_ca_dbnode_id_5ac7aa3704de0639_fk_db_dbnode_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbworkflowstep_calculations_1df98a0a', 'dbworkflowstep_id'), + sa.Index('db_dbworkflowstep_calculations_7a672316', 'dbnode_id'), + ) + sa.Table( + 'db_dbworkflowstep_sub_workflows', + metadata, + sa.Column('dbworkflow_id', sa.Integer(), nullable=False), + sa.Column('dbworkflowstep_id', sa.Integer(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name='db_dbworkflowstep_sub_workflows_pkey'), + sa.UniqueConstraint( + 'dbworkflow_id', + 'dbworkflowstep_id', + name='db_dbworkflowstep_sub_workflo_dbworkflowstep_id_dbworkflow__key' + ), + sa.ForeignKeyConstraint( + ['dbworkflowstep_id'], + ['db_dbworkflowstep.id'], + name='db_d_dbworkflowstep_id_7798ce4345e8e576_fk_db_dbworkflowstep_id', + deferrable=True, + initially='DEFERRED', + ), + sa.ForeignKeyConstraint( + ['dbworkflow_id'], + ['db_dbworkflow.id'], + name='db_dbworkflo_dbworkflow_id_4a3395f4c392c63c_fk_db_dbworkflow_id', + deferrable=True, + initially='DEFERRED', + ), + sa.Index('db_dbworkflowstep_sub_workflows_1df98a0a', 'dbworkflowstep_id'), + sa.Index('db_dbworkflowstep_sub_workflows_b6a7b7c8', 'dbworkflow_id'), + ) + return metadata diff --git a/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy/test_v0x_django_0003.yml b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy/test_v0x_django_0003.yml new file mode 100644 index 0000000000..30b7e96182 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_legacy/test_v0x_django_0003.yml @@ -0,0 +1,523 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null 
+ is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + default: null + is_nullable: false + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: 
false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + uq_db_dbcomment_uuid: + - uuid + db_dbcomputer: + uq_db_dbcomputer_label: + - label + uq_db_dbcomputer_uuid: + - uuid + db_dbgroup: + uq_db_dbgroup_label_type_string: + - label + - type_string + uq_db_dbgroup_uuid: + - uuid + db_dbgroup_dbnodes: + uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: + - dbgroup_id + - dbnode_id + db_dblog: + uq_db_dblog_uuid: + - uuid + db_dbnode: + uq_db_dbnode_uuid: + - uuid + db_dbsetting: + uq_db_dbsetting_key: + - key + db_dbuser: + uq_db_dbuser_email: + - email +foreign_keys: + db_dbauthinfo: + fk_db_dbauthinfo_aiidauser_id_db_dbuser: FOREIGN KEY (aiidauser_id) REFERENCES + db_dbuser(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + fk_db_dbauthinfo_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES + db_dbcomputer(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + fk_db_dbcomment_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY 
DEFERRED + fk_db_dbcomment_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + fk_db_dbgroup_dbnodes_dbgroup_id_db_dbgroup: FOREIGN KEY (dbgroup_id) REFERENCES + db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + fk_db_dbgroup_dbnodes_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + fk_db_dblink_input_id_db_dbnode: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + fk_db_dblink_output_id_db_dbnode: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + fk_db_dblog_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + fk_db_dbnode_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES + db_dbcomputer(id) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + fk_db_dbnode_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + ix_db_dbauthinfo_aiidauser_id: CREATE INDEX ix_db_dbauthinfo_aiidauser_id ON public.db_dbauthinfo + USING btree (aiidauser_id) + ix_db_dbauthinfo_dbcomputer_id: CREATE INDEX ix_db_dbauthinfo_dbcomputer_id ON + public.db_dbauthinfo USING btree (dbcomputer_id) + uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: CREATE UNIQUE INDEX uq_db_dbauthinfo_aiidauser_id_dbcomputer_id + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + ix_db_dbcomment_dbnode_id: CREATE INDEX ix_db_dbcomment_dbnode_id ON public.db_dbcomment + USING btree (dbnode_id) + ix_db_dbcomment_user_id: CREATE INDEX ix_db_dbcomment_user_id ON public.db_dbcomment + USING btree (user_id) + uq_db_dbcomment_uuid: CREATE UNIQUE INDEX uq_db_dbcomment_uuid ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + ix_pat_db_dbcomputer_label: CREATE INDEX ix_pat_db_dbcomputer_label ON public.db_dbcomputer + USING btree (label varchar_pattern_ops) + uq_db_dbcomputer_label: CREATE UNIQUE INDEX uq_db_dbcomputer_label ON public.db_dbcomputer + USING btree (label) + uq_db_dbcomputer_uuid: CREATE UNIQUE INDEX uq_db_dbcomputer_uuid ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + ix_db_dbgroup_user_id: CREATE INDEX ix_db_dbgroup_user_id ON public.db_dbgroup + USING btree (user_id) + ix_pat_db_dbgroup_label: CREATE INDEX ix_pat_db_dbgroup_label ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + ix_pat_db_dbgroup_type_string: CREATE INDEX ix_pat_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + uq_db_dbgroup_label_type_string: CREATE UNIQUE INDEX 
+    uq_db_dbgroup_uuid: CREATE UNIQUE INDEX uq_db_dbgroup_uuid ON public.db_dbgroup
+      USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+    ix_db_dbgroup_dbnodes_dbgroup_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbgroup_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    ix_db_dbgroup_dbnodes_dbnode_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: CREATE UNIQUE INDEX uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+  db_dblink:
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink
+      USING btree (input_id)
+    ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING
+      btree (label)
+    ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink
+      USING btree (output_id)
+    ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree
+      (type)
+    ix_pat_db_dblink_label: CREATE INDEX ix_pat_db_dblink_label ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dblink_type: CREATE INDEX ix_pat_db_dblink_type ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    ix_db_dblog_dbnode_id: CREATE INDEX ix_db_dblog_dbnode_id ON public.db_dblog USING
+      btree (dbnode_id)
+    ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING
+      btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername)
+    ix_pat_db_dblog_levelname: CREATE INDEX ix_pat_db_dblog_levelname ON public.db_dblog
+      USING btree (levelname varchar_pattern_ops)
+    ix_pat_db_dblog_loggername: CREATE INDEX ix_pat_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername varchar_pattern_ops)
+    uq_db_dblog_uuid: CREATE UNIQUE INDEX uq_db_dblog_uuid ON public.db_dblog USING
+      btree (uuid)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    ix_db_dbnode_ctime: CREATE INDEX ix_db_dbnode_ctime ON public.db_dbnode USING
+      btree (ctime)
+    ix_db_dbnode_dbcomputer_id: CREATE INDEX ix_db_dbnode_dbcomputer_id ON public.db_dbnode
+      USING btree (dbcomputer_id)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_mtime: CREATE INDEX ix_db_dbnode_mtime ON public.db_dbnode USING
+      btree (mtime)
+    ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode
+      USING btree (node_type)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_user_id: CREATE INDEX ix_db_dbnode_user_id ON public.db_dbnode USING
+      btree (user_id)
+    ix_pat_db_dbnode_label: CREATE INDEX ix_pat_db_dbnode_label ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dbnode_node_type: CREATE INDEX ix_pat_db_dbnode_node_type ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    ix_pat_db_dbnode_process_type: CREATE INDEX ix_pat_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type varchar_pattern_ops)
+    uq_db_dbnode_uuid: CREATE UNIQUE INDEX uq_db_dbnode_uuid ON public.db_dbnode USING
+      btree (uuid)
+  db_dbsetting:
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_pat_db_dbsetting_key: CREATE INDEX ix_pat_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key varchar_pattern_ops)
+    uq_db_dbsetting_key: CREATE UNIQUE INDEX uq_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_pat_db_dbuser_email: CREATE INDEX ix_pat_db_dbuser_email ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    uq_db_dbuser_email: CREATE UNIQUE INDEX uq_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
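The YAML snapshot above is the regression baseline that the schema tests compare against: for every migration revision, the live PostgreSQL schema is reflected into a plain dictionary (columns, constraints, foreign keys, indexes) and diffed against a file like this one. As a hedged sketch of how such a reflection could be built with SQLAlchemy's inspector (the function name and exact grouping here are illustrative, not the PR's actual fixture):

from sqlalchemy import create_engine, inspect

def reflect_schema(dsn: str) -> dict:
    """Reflect column and index metadata of a PostgreSQL database into a plain dict."""
    inspector = inspect(create_engine(dsn))
    data = {'columns': {}, 'indexes': {}}
    for table in sorted(inspector.get_table_names()):
        # one entry per column, mirroring the data_type/is_nullable keys above
        data['columns'][table] = {
            col['name']: {'data_type': str(col['type']).lower(), 'is_nullable': col['nullable']}
            for col in inspector.get_columns(table)
        }
        data['indexes'][table] = sorted(ix['name'] for ix in inspector.get_indexes(table))
    return data

A dictionary built this way is deterministic, so it can be serialized to YAML and compared byte-for-byte across runs.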
diff --git a/aiida/orm/implementation/django/__init__.py b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_migrate_to_head.py
similarity index 57%
rename from aiida/orm/implementation/django/__init__.py
rename to tests/backends/aiida_sqlalchemy/migrations/django_branch/test_migrate_to_head.py
index 5089f32237..65f08c0e73 100644
--- a/aiida/orm/implementation/django/__init__.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/django_branch/test_migrate_to_head.py
@@ -7,25 +7,12 @@
 # For further information on the license, see the LICENSE.txt file          #
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
-"""Implementation of Django backend."""
+"""Test migrating from the base of the django branch, to the main head."""
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

-# AUTO-GENERATED
-# yapf: disable
-# pylint: disable=wildcard-import
-
-from .backend import *
-from .convert import *
-from .groups import *
-from .users import *
-
-__all__ = (
-    'DjangoBackend',
-    'DjangoGroup',
-    'DjangoGroupCollection',
-    'DjangoUser',
-    'DjangoUserCollection',
-    'get_backend_entity',
-)
-
-# yapf: enable
+def test_migrate(perform_migrations: PsqlDostoreMigrator):
+    """Test that the migrator can migrate from the base of the django branch, to the main head."""
+    perform_migrations.migrate_up('django@django_0001')  # the base of the django branch
+    perform_migrations.migrate()
+    perform_migrations.validate_storage()
diff --git a/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/__init__.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_10_group_update.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_10_group_update.py
similarity index 87%
rename from tests/backends/aiida_sqlalchemy/migrations/test_10_group_update.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_10_group_update.py
index d2e781b797..e5643b71f1 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_10_group_update.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_10_group_update.py
@@ -8,16 +8,16 @@
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
 """Tests for group migrations: 118349c10896 -> 0edcdd5a30f0"""
-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

-def test_group_typestring(perform_migrations: Migrator):
+def test_group_typestring(perform_migrations: PsqlDostoreMigrator):
     """Test the migration that renames the DbGroup type strings.

     Verify that the type strings are properly migrated.
     """
     # starting revision
-    perform_migrations.migrate_down('118349c10896')  # 118349c10896_default_link_label.py
+    perform_migrations.migrate_up('sqlalchemy@118349c10896')  # 118349c10896_default_link_label.py

     # setup the database
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
@@ -49,7 +49,7 @@ def test_group_typestring(perform_migrations: Migrator):
     group_autorun_pk = group_autorun.id

     # migrate up
-    perform_migrations.migrate_up('bf591f31dd12')  # bf591f31dd12_dbgroup_type_string.py
+    perform_migrations.migrate_up('sqlalchemy@bf591f31dd12')  # bf591f31dd12_dbgroup_type_string.py

     # perform some checks
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
@@ -70,13 +70,13 @@ def test_group_typestring(perform_migrations: Migrator):
     assert group_autorun.type_string == 'core.auto'

-def test_group_extras(perform_migrations: Migrator):
+def test_group_extras(perform_migrations: PsqlDostoreMigrator):
     """Test migration to add the `extras` JSONB column to the `DbGroup` model.

     Verify that the model now has an extras column with empty dictionary as default.
     """
     # starting revision
-    perform_migrations.migrate_down('bf591f31dd12')  # bf591f31dd12_dbgroup_type_string.py
+    perform_migrations.migrate_up('sqlalchemy@bf591f31dd12')  # bf591f31dd12_dbgroup_type_string.py

     # setup the database
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
@@ -93,7 +93,7 @@ def test_group_extras(perform_migrations: Migrator):
     group_pk = group.id

     # migrate up
-    perform_migrations.migrate_up('0edcdd5a30f0')  # 0edcdd5a30f0_dbgroup_extras.py
+    perform_migrations.migrate_up('sqlalchemy@0edcdd5a30f0')  # 0edcdd5a30f0_dbgroup_extras.py

     # perform some checks
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
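A pattern worth noting in the hunks above: every bare revision hash passed to the migrator is replaced by a branch-qualified target such as 'sqlalchemy@bf591f31dd12', since the unified PsqlDostoreMigrator now manages three alembic branches (django, sqlalchemy, main) in one history. A minimal sketch of how such a target could be split before being handed to alembic (the helper name is illustrative, not aiida-core's actual code):

def split_target(target: str) -> tuple:
    """Split 'sqlalchemy@bf591f31dd12' into ('sqlalchemy', 'bf591f31dd12')."""
    branch, sep, revision = target.partition('@')
    return (branch, revision) if sep else (None, branch)

assert split_target('sqlalchemy@bf591f31dd12') == ('sqlalchemy', 'bf591f31dd12')
assert split_target('head') == (None, 'head')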
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_11_v2_repository.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_11_v2_repository.py
similarity index 74%
rename from tests/backends/aiida_sqlalchemy/migrations/test_11_v2_repository.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_11_v2_repository.py
index d2ae9a1cc4..a7e7cb9be1 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_11_v2_repository.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_11_v2_repository.py
@@ -11,19 +11,18 @@
 import hashlib
 import os

-from aiida.backends.general.migrations import utils
+from aiida.backends.sqlalchemy.migrations.utils import utils
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
 from aiida.common.utils import get_new_uuid

-from .conftest import Migrator
-
-def test_node_repository_metadata(perform_migrations: Migrator):
+def test_node_repository_metadata(perform_migrations: PsqlDostoreMigrator):
     """Test migration adding the `repository_metadata` column to the `Node` model.

     Verify that the column is added and null by default.
     """
     # starting revision
-    perform_migrations.migrate_down('0edcdd5a30f0')  # 0edcdd5a30f0_dbgroup_extras.py
+    perform_migrations.migrate_up('sqlalchemy@0edcdd5a30f0')  # 0edcdd5a30f0_dbgroup_extras.py

     # setup the database
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
@@ -39,23 +38,23 @@ def test_node_repository_metadata(perform_migrations: Migrator):
     node_id = node.id

     # migrate up
-    perform_migrations.migrate_up('7536a82b2cc4')  # 7536a82b2cc4_add_node_repository_metadata.py
+    perform_migrations.migrate_up('sqlalchemy@7536a82b2cc4')  # 7536a82b2cc4_add_node_repository_metadata.py

     # perform some checks
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
     with perform_migrations.session() as session:
         node = session.query(DbNode).filter(DbNode.id == node_id).one()
         assert hasattr(node, 'repository_metadata')
-        assert node.repository_metadata is None
+        assert node.repository_metadata == {}

-def test_entry_point_core_prefix(perform_migrations: Migrator):
+def test_entry_point_core_prefix(perform_migrations: PsqlDostoreMigrator):
     """Test migration that updates node types after `core.` prefix was added to entry point names.

     Verify that the column was successfully renamed.
     """
     # starting revision
-    perform_migrations.migrate_down('535039300e4a')  # 535039300e4a_computer_name_to_label.py
+    perform_migrations.migrate_up('sqlalchemy@535039300e4a')  # 535039300e4a_computer_name_to_label.py

     # setup the database
     DbComputer = perform_migrations.get_current_table('db_dbcomputer')  # pylint: disable=invalid-name
@@ -74,19 +73,24 @@ def test_entry_point_core_prefix(perform_migrations: Migrator):
         calcjob = DbNode(
             user_id=user.id,
             process_type='aiida.calculations:core.arithmetic.add',
-            attributes={'parser_name': 'core.arithmetic.add'}
+            attributes={'parser_name': 'core.arithmetic.add'},
+            repository_metadata={},
         )
         session.add(calcjob)
         session.commit()
         calcjob_id = calcjob.id

-        workflow = DbNode(user_id=user.id, process_type='aiida.workflows:arithmetic.add_multiply')
+        workflow = DbNode(
+            user_id=user.id,
+            process_type='aiida.workflows:arithmetic.add_multiply',
+            repository_metadata={},
+        )
         session.add(workflow)
         session.commit()
         workflow_id = workflow.id

     # migrate up
-    perform_migrations.migrate_up('34a831f4286d')  # 34a831f4286d_entry_point_core_prefix
+    perform_migrations.migrate_up('sqlalchemy@34a831f4286d')  # 34a831f4286d_entry_point_core_prefix

     # perform some checks
     DbComputer = perform_migrations.get_current_table('db_dbcomputer')  # pylint: disable=invalid-name
@@ -104,13 +108,13 @@ def test_entry_point_core_prefix(perform_migrations: Migrator):
     assert workflow.process_type == 'aiida.workflows:core.arithmetic.add_multiply'

-def test_repository_migration(perform_migrations: Migrator):  # pylint: disable=too-many-statements
+def test_repository_migration(perform_migrations: PsqlDostoreMigrator):  # pylint: disable=too-many-statements,too-many-locals
     """Test migration of the old file repository to the disk object store.

     Verify that the files are correctly migrated.
     """
     # starting revision
-    perform_migrations.migrate_down('7536a82b2cc4')
+    perform_migrations.migrate_up('sqlalchemy@7536a82b2cc4')

     # setup the database
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
@@ -120,13 +124,11 @@ def test_repository_migration(perform_migrations: Migrator):  # pylint: disable=
         session.add(default_user)
         session.commit()

-        # For some reasons, the UUIDs do not get created automatically through the column's default in the
-        # migrations so we set it manually using the same method.
-        node_01 = DbNode(user_id=default_user.id, uuid=get_new_uuid())
-        node_02 = DbNode(user_id=default_user.id, uuid=get_new_uuid())
-        node_03 = DbNode(user_id=default_user.id, uuid=get_new_uuid())
-        node_04 = DbNode(user_id=default_user.id, uuid=get_new_uuid())
-        node_05 = DbNode(user_id=default_user.id, uuid=get_new_uuid())
+        node_01 = DbNode(user_id=default_user.id, uuid=get_new_uuid(), repository_metadata={})
+        node_02 = DbNode(user_id=default_user.id, uuid=get_new_uuid(), repository_metadata={})
+        node_03 = DbNode(user_id=default_user.id, uuid=get_new_uuid(), repository_metadata={})
+        node_04 = DbNode(user_id=default_user.id, uuid=get_new_uuid(), repository_metadata={})
+        node_05 = DbNode(user_id=default_user.id, uuid=get_new_uuid(), repository_metadata={})

         session.add(node_01)
         session.add(node_02)
@@ -146,28 +148,30 @@ def test_repository_migration(perform_migrations: Migrator):  # pylint: disable=
         node_03_pk = node_03.id
         node_05_pk = node_05.id

-        utils.put_object_from_string(node_01.uuid, 'sub/path/file_b.txt', 'b')
-        utils.put_object_from_string(node_01.uuid, 'sub/file_a.txt', 'a')
-        utils.put_object_from_string(node_02.uuid, 'output.txt', 'output')
+        repo_path = perform_migrations.profile.repository_path
+
+        utils.put_object_from_string(repo_path, node_01.uuid, 'sub/path/file_b.txt', 'b')
+        utils.put_object_from_string(repo_path, node_01.uuid, 'sub/file_a.txt', 'a')
+        utils.put_object_from_string(repo_path, node_02.uuid, 'output.txt', 'output')

-        os.makedirs(utils.get_node_repository_sub_folder(node_04.uuid, 'path'), exist_ok=True)
-        os.makedirs(utils.get_node_repository_sub_folder(node_04.uuid, 'raw_input'), exist_ok=True)
-        os.makedirs(utils.get_node_repository_sub_folder(node_05.uuid, 'path'), exist_ok=True)
-        os.makedirs(utils.get_node_repository_sub_folder(node_05.uuid, 'raw_input'), exist_ok=True)
+        os.makedirs(utils.get_node_repository_sub_folder(repo_path, node_04.uuid, 'path'), exist_ok=True)
+        os.makedirs(utils.get_node_repository_sub_folder(repo_path, node_04.uuid, 'raw_input'), exist_ok=True)
+        os.makedirs(utils.get_node_repository_sub_folder(repo_path, node_05.uuid, 'path'), exist_ok=True)
+        os.makedirs(utils.get_node_repository_sub_folder(repo_path, node_05.uuid, 'raw_input'), exist_ok=True)

-        utils.put_object_from_string(node_05.uuid, '.gitignore', 'test')
+        utils.put_object_from_string(repo_path, node_05.uuid, '.gitignore', 'test')
         with open(
-            os.path.join(utils.get_node_repository_sub_folder(node_05.uuid, 'raw_input'), 'input.txt'),
+            os.path.join(utils.get_node_repository_sub_folder(repo_path, node_05.uuid, 'raw_input'), 'input.txt'),
             'w',
             encoding='utf-8',
         ) as handle:
             handle.write('input')

         # Add a repository folder for a node that no longer exists - i.e. it may have been deleted.
-        utils.put_object_from_string(get_new_uuid(), 'file_of_deleted_node', 'output')
+        utils.put_object_from_string(repo_path, get_new_uuid(), 'file_of_deleted_node', 'output')

     # migrate up
-    perform_migrations.migrate_up('1feaea71bd5a')
+    perform_migrations.migrate_up('sqlalchemy@1feaea71bd5a')

     # perform some checks
     repository_uuid_key = 'repository|uuid'
@@ -204,7 +208,7 @@ def test_repository_migration(perform_migrations: Migrator):  # pylint: disable=
             }
         }
     }
-    assert node_03.repository_metadata is None
+    assert node_03.repository_metadata == {}
     assert node_05.repository_metadata == {
         'o': {
             'input.txt': {
@@ -219,20 +223,20 @@ def test_repository_migration(perform_migrations: Migrator):  # pylint: disable=
         (node_02.repository_metadata['o']['output.txt']['k'], b'output'),
         (node_05.repository_metadata['o']['input.txt']['k'], b'input'),
     ):
-        assert utils.get_repository_object(hashkey) == content
+        assert utils.get_repository_object(perform_migrations.profile, hashkey) == content

     repository_uuid = session.query(DbSetting).filter(DbSetting.key == repository_uuid_key).one()
     assert repository_uuid is not None
     assert isinstance(repository_uuid.val, str)

-def test_computer_name_to_label(perform_migrations: Migrator):
+def test_computer_name_to_label(perform_migrations: PsqlDostoreMigrator):
     """Test the renaming of `name` to `label` for `DbComputer`.

     Verify that the column was successfully renamed.
     """
     # starting revision
-    perform_migrations.migrate_down('1feaea71bd5a')  # 1feaea71bd5a_migrate_repository
+    perform_migrations.migrate_up('sqlalchemy@1feaea71bd5a')  # 1feaea71bd5a_migrate_repository

     # setup the database
     DbComputer = perform_migrations.get_current_table('db_dbcomputer')  # pylint: disable=invalid-name
@@ -243,7 +247,7 @@ def test_computer_name_to_label(perform_migrations: Migrator):
     computer_id = computer.id

     # migrate up
-    perform_migrations.migrate_up('535039300e4a')  # 5ddd24e52864_dbnode_type_to_dbnode_node_type
+    perform_migrations.migrate_up('sqlalchemy@535039300e4a')  # 535039300e4a_computer_name_to_label

     # perform some checks
     DbComputer = perform_migrations.get_current_table('db_dbcomputer')  # pylint: disable=invalid-name
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_12_sqla_django_parity.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_12_sqla_django_parity.py
similarity index 94%
rename from tests/backends/aiida_sqlalchemy/migrations/test_12_sqla_django_parity.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_12_sqla_django_parity.py
index 08da595f20..40d4c25fb9 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_12_sqla_django_parity.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_12_sqla_django_parity.py
@@ -9,13 +9,13 @@
 ###########################################################################
 """Tests for migrations to bring parity between SQLAlchemy and Django."""
 # pylint: disable=invalid-name,too-many-locals,too-many-statements
-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

-def test_non_nullable(perform_migrations: Migrator):
+def test_non_nullable(perform_migrations: PsqlDostoreMigrator):
     """Test making columns non-nullable."""
     # starting revision
-    perform_migrations.migrate_down('34a831f4286d')
+    perform_migrations.migrate_up('sqlalchemy@34a831f4286d')

     # setup the database
     DbAuthInfo = perform_migrations.get_current_table('db_dbauthinfo')
@@ -63,7 +63,8 @@ def test_non_nullable(perform_migrations: Migrator):
             node_type='',
             uuid=None,
             attributes={},
-            extras={}
+            extras={},
+            repository_metadata={},
         )
         session.add(node)
         session.commit()
@@ -78,7 +79,7 @@ def test_non_nullable(perform_migrations: Migrator):
         log_id = log.id

     # migrate up
-    perform_migrations.migrate_up('1de112340b18')
+    perform_migrations.migrate_up('sqlalchemy@1de112340b18')

     # perform some checks
     DbAuthInfo = perform_migrations.get_current_table('db_dbauthinfo')
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_1_provenance_redesign.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_1_provenance_redesign.py
similarity index 92%
rename from tests/backends/aiida_sqlalchemy/migrations/test_1_provenance_redesign.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_1_provenance_redesign.py
index bcee317ad8..6ddecd12fe 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_1_provenance_redesign.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_1_provenance_redesign.py
@@ -8,22 +8,22 @@
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
 """Tests for the provenance redesign: 140c971ae0a3 -> 239cea6d2452"""
-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

-def test_provenance_redesign(perform_migrations: Migrator):
+def test_provenance_redesign(perform_migrations: PsqlDostoreMigrator):
     """Test the data migration part of the provenance redesign migration.

     Verify that type string of the Data node are successfully adapted.
     """
     # starting revision
-    perform_migrations.migrate_down('140c971ae0a3')  # 140c971ae0a3_migrate_builtin_calculations
+    perform_migrations.migrate_up('sqlalchemy@140c971ae0a3')  # 140c971ae0a3_migrate_builtin_calculations

     # setup the database
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
     DbUser = perform_migrations.get_current_table('db_dbuser')  # pylint: disable=invalid-name
     with perform_migrations.session() as session:
-        user = DbUser(email='user@aiida.net')
+        user = DbUser(email='user@aiida.net', is_superuser=True)
         session.add(user)
         session.commit()

@@ -55,7 +55,7 @@ def test_provenance_redesign(perform_migrations: Migrator):
         node_function_id = node_function.id

     # migrate up
-    perform_migrations.migrate_up('239cea6d2452')  # 239cea6d2452_provenance_redesign
+    perform_migrations.migrate_up('sqlalchemy@239cea6d2452')  # 239cea6d2452_provenance_redesign

     # perform some checks
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
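The provenance-redesign data migration rewrites node type strings in place rather than restructuring tables. An illustrative excerpt of the kind of mapping it applies (the full, authoritative mapping lives in the 239cea6d2452 revision file; the entries below are a hedged sketch):

# Illustrative mapping in the spirit of the 239cea6d2452 data migration.
TYPE_STRING_MAP = {
    'calculation.job.JobCalculation.': 'node.process.calculation.calcjob.CalcJobNode.',
    'calculation.inline.InlineCalculation.': 'node.process.calculation.calcfunction.CalcFunctionNode.',
}

def migrate_type_string(type_string: str) -> str:
    """Return the post-redesign type string, leaving unknown values untouched."""
    return TYPE_STRING_MAP.get(type_string, type_string)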
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_2_group_renaming.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_2_group_renaming.py
similarity index 87%
rename from tests/backends/aiida_sqlalchemy/migrations/test_2_group_renaming.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_2_group_renaming.py
index f2238119d6..b10c08ea2e 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_2_group_renaming.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_2_group_renaming.py
@@ -8,19 +8,21 @@
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
 """Test renaming of type strings: b8b23ddefad4 -> e72ad251bcdb"""
-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

-def test_group_renaming(perform_migrations: Migrator):
+def test_group_renaming(perform_migrations: PsqlDostoreMigrator):
     """Test the migration that renames the DbGroup type strings."""
     # starting revision
-    perform_migrations.migrate_down('b8b23ddefad4')  # b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py
+    perform_migrations.migrate_up(
+        'sqlalchemy@b8b23ddefad4'
+    )  # b8b23ddefad4_dbgroup_name_to_label_type_to_type_string.py

     # setup the database
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
     DbUser = perform_migrations.get_current_table('db_dbuser')  # pylint: disable=invalid-name
     with perform_migrations.session() as session:
-        default_user = DbUser(email='user@aiida.net')
+        default_user = DbUser(email='user@aiida.net', is_superuser=True)
         session.add(default_user)
         session.commit()

@@ -44,7 +46,7 @@ def test_group_renaming(perform_migrations: Migrator):
     group_autorun_pk = group_autorun.id

     # migrate up
-    perform_migrations.migrate_up('e72ad251bcdb')  # e72ad251bcdb_dbgroup_class_change_type_string_values.py
+    perform_migrations.migrate_up('sqlalchemy@e72ad251bcdb')  # e72ad251bcdb_dbgroup_class_change_type_string_values.py

     # perform some checks
     DbGroup = perform_migrations.get_current_table('db_dbgroup')  # pylint: disable=invalid-name
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_3_calc_attributes_keys.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_3_calc_attributes_keys.py
similarity index 91%
rename from tests/backends/aiida_sqlalchemy/migrations/test_3_calc_attributes_keys.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_3_calc_attributes_keys.py
index ef1848e612..460584906d 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_3_calc_attributes_keys.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_3_calc_attributes_keys.py
@@ -8,7 +8,7 @@
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
 """Tests migration of the keys of certain attribute for ProcessNodes and CalcJobNodes: e72ad251bcdb -> 7ca08c391c49"""
-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator

 KEY_RESOURCES_OLD = 'jobresource_params'
 KEY_RESOURCES_NEW = 'resources'
@@ -22,10 +22,10 @@ PROCESS_LABEL = 'TestLabel'

-def test_calc_attributes_keys(perform_migrations: Migrator):
+def test_calc_attributes_keys(perform_migrations: PsqlDostoreMigrator):
     """Test the migration of the keys of certain attribute for ProcessNodes and CalcJobNodes."""
     # starting revision
-    perform_migrations.migrate_down('e72ad251bcdb')  # e72ad251bcdb_dbgroup_class_change_type_string_values
+    perform_migrations.migrate_up('sqlalchemy@e72ad251bcdb')  # e72ad251bcdb_dbgroup_class_change_type_string_values

     # setup the database
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
@@ -35,7 +35,7 @@ def test_calc_attributes_keys(perform_migrations: Migrator):
     environment_variables = {}

     with perform_migrations.session() as session:
-        user = DbUser(email='user@aiida.net')
+        user = DbUser(email='user@aiida.net', is_superuser=True)
         session.add(user)
         session.commit()

@@ -63,7 +63,7 @@ def test_calc_attributes_keys(perform_migrations: Migrator):
         node_other_id = node_other.id

     # migrate up
-    perform_migrations.migrate_up('7ca08c391c49')  # 7ca08c391c49_calc_job_option_attribute_keys
+    perform_migrations.migrate_up('sqlalchemy@7ca08c391c49')  # 7ca08c391c49_calc_job_option_attribute_keys

     # perform some checks
     DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_4_dblog_update.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_4_dblog_update.py
similarity index 71%
rename from tests/backends/aiida_sqlalchemy/migrations/test_4_dblog_update.py
rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_4_dblog_update.py
index 05952f68c1..c3d45bc6ad 100644
--- a/tests/backends/aiida_sqlalchemy/migrations/test_4_dblog_update.py
+++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_4_dblog_update.py
@@ -8,33 +8,32 @@
 # For further information please visit http://www.aiida.net                 #
 ###########################################################################
 """Tests DbLog migration: 7ca08c391c49 -> 375c2db70663"""
-import importlib
 import json

 import pytest
 from sqlalchemy import column

-from .conftest import Migrator
+from aiida.backends.sqlalchemy.migrations.utils import dblog_update
+from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator
+
+# The values that will be exported for the log records that will be deleted
+values_to_export = ('id', 'time', 'loggername', 'levelname', 'objpk', 'objname', 'message', 'metadata')

 class TestDbLogMigrationRecordCleaning:
     """Test the migration of the keys of certain attribute for ProcessNodes and CalcJobNodes."""

-    migrator: Migrator
+    migrator: PsqlDostoreMigrator

     @pytest.fixture(autouse=True)
-    def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-locals,too-many-statements
+    def setup_db(self, perform_migrations: PsqlDostoreMigrator):  # pylint: disable=too-many-locals,too-many-statements
         """Setup the database schema."""
-        from aiida.backends.general.migrations.utils import dumps_json
+        from aiida.backends.sqlalchemy.migrations.utils.utils import dumps_json

         self.migrator = perform_migrations

         # starting revision
-        perform_migrations.migrate_down('7ca08c391c49')  # 7ca08c391c49_calc_job_option_attribute_keys
-
-        log_migration = importlib.import_module(
-            'aiida.backends.sqlalchemy.migrations.versions.041a79fc615f_dblog_cleaning'
-        )
+        perform_migrations.migrate_up('sqlalchemy@7ca08c391c49')  # 7ca08c391c49_calc_job_option_attribute_keys

         DbUser = perform_migrations.get_current_table('db_dbuser')  # pylint: disable=invalid-name
         DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
@@ -43,7 +42,7 @@ def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-lo

         with perform_migrations.session() as session:
-            user = DbUser(email='user@aiida.net')
+            user = DbUser(email='user@aiida.net', is_superuser=True)
             session.add(user)
             session.commit()

@@ -153,7 +152,7 @@ def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-lo

             # The columns to project
             cols_to_project = []
-            for val in log_migration.values_to_export:
+            for val in values_to_export:
                 cols_to_project.append(getattr(DbLog, val))

             # Getting the serialized Dict logs
@@ -163,9 +162,9 @@ def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-lo
             serialized_param_data = dumps_json([param_data._asdict()])
             # Getting the serialized logs for the unknown entity logs (as the export migration function
             # provides them) - this should coincide to the above
-            serialized_unknown_exp_logs = log_migration.get_serialized_unknown_entity_logs(session)
+            serialized_unknown_exp_logs = dblog_update.get_serialized_unknown_entity_logs(session)
             # Getting their number
-            unknown_exp_logs_number = log_migration.get_unknown_entity_log_number(session)
+            unknown_exp_logs_number = dblog_update.get_unknown_entity_log_number(session)
             self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, unknown_exp_logs_number)

             # Getting the serialized legacy workflow logs
@@ -176,8 +175,8 @@ def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-lo
             serialized_leg_wf_logs = dumps_json([leg_wf._asdict()])
             # Getting the serialized logs for the legacy workflow logs (as the export migration function
             # provides them) - this should coincide to the above
-            serialized_leg_wf_exp_logs = log_migration.get_serialized_legacy_workflow_logs(session)
-            eg_wf_exp_logs_number = log_migration.get_legacy_workflow_log_number(session)
+            serialized_leg_wf_exp_logs = dblog_update.get_serialized_legacy_workflow_logs(session)
+            eg_wf_exp_logs_number = dblog_update.get_legacy_workflow_log_number(session)
             self.to_check['WorkflowNode'] = (serialized_leg_wf_logs, serialized_leg_wf_exp_logs, eg_wf_exp_logs_number)

@@ -189,12 +188,12 @@ def setup_db(self, perform_migrations: Migrator):  # pylint: disable=too-many-lo

             # Getting the serialized logs that don't correspond to a node (as the export migration function
             # provides them) - this should coincide to the above
-            serialized_logs_exp_no_node = log_migration.get_serialized_logs_with_no_nodes(session)
-            logs_no_node_number = log_migration.get_logs_with_no_nodes_number(session)
+            serialized_logs_exp_no_node = dblog_update.get_serialized_logs_with_no_nodes(session)
+            logs_no_node_number = dblog_update.get_logs_with_no_nodes_number(session)
             self.to_check['NoNode'] = (serialized_logs_no_node, serialized_logs_exp_no_node, logs_no_node_number)

         # migrate up
-        perform_migrations.migrate_up('041a79fc615f')  # 041a79fc615f_dblog_cleaning
+        perform_migrations.migrate_up('sqlalchemy@041a79fc615f')  # 041a79fc615f_dblog_cleaning

         yield

@@ -259,105 +258,20 @@ def test_metadata_correctness(self):
         assert 'objname' not in m_res.keys(), 'objname should not exist any more in metadata'

-def test_backward_migration(perform_migrations: Migrator):
-    """Check that backward migrations work also for the DbLog migration(s).
-
-    This test verifies that the objpk and objname have the right values
-    after a forward and a backward migration.
-    """
-    # starting revision
-    perform_migrations.migrate_down('041a79fc615f')  # 041a79fc615f_dblog_cleaning
-
-    # setup the database
-    DbLog = perform_migrations.get_current_table('db_dblog')  # pylint: disable=invalid-name
-    DbNode = perform_migrations.get_current_table('db_dbnode')  # pylint: disable=invalid-name
-    DbUser = perform_migrations.get_current_table('db_dbuser')  # pylint: disable=invalid-name
-    with perform_migrations.session() as session:
-        user = DbUser(email='user@aiida.net')
-        session.add(user)
-        session.commit()
-
-        calc_1 = DbNode(type='node.process.calculation.CalculationNode.1', user_id=user.id)
-        calc_2 = DbNode(type='node.process.calculation.CalculationNode.2', user_id=user.id)
-
-        session.add(calc_1)
-        session.add(calc_2)
-        session.commit()
-
-        log_1 = DbLog(
-            loggername='CalculationNode logger',
-            dbnode_id=calc_1.id,
-            message='calculation node 1',
-            metadata={
-                'msecs': 719.0849781036377,
-                'lineno': 350,
-                'thread': 140011612940032,
-                'asctime': '10/21/2018 12:39:51 PM',
-                'created': 1540118391.719085,
-                'levelno': 23,
-                'message': 'calculation node 1',
-            })
-        log_2 = DbLog(
-            loggername='CalculationNode logger',
-            dbnode_id=calc_2.id,
-            message='calculation node 2',
-            metadata={
-                'msecs': 719.0849781036377,
-                'lineno': 360,
-                'levelno': 23,
-                'message': 'calculation node 1',
-            })
-
-        session.add(log_1)
-        session.add(log_2)
-
-        session.commit()
-
-        # Keeping what is needed to be verified at the test
-        to_check = {
-            log_1.id: (log_1.dbnode_id, calc_1.type),
-            log_2.id: (log_2.dbnode_id, calc_2.type)
-        }
-
-    # migrate down
-    perform_migrations.migrate_down('7ca08c391c49')  # e72ad251bcdb_dbgroup_class_change_type_string_values
-
-    # perform some checks
-    DbLog = perform_migrations.get_current_table('db_dblog')  # pylint: disable=invalid-name
-    with perform_migrations.session() as session:
-
-        for log_pk, to_check_value in to_check.items():
-            log_entry = session.query(DbLog).filter(DbLog.id == log_pk).one()
-            log_dbnode_id, node_type = to_check_value
-
-            assert log_dbnode_id == log_entry.objpk, (
-                f'The dbnode_id ({log_dbnode_id}) of the 0024 schema version '
-                f'should be identical to the objpk ({log_entry.objpk}) of the 0023 schema version.')
-            assert node_type == log_entry.objname, (
-                f'The type ({node_type}) of the linked node of the 0024 schema version should be identical to the '
-                f'objname ({log_entry.objname}) of the 0023 schema version.')
-            assert log_dbnode_id == log_entry.metadata['objpk'], (
-                f'The dbnode_id ({log_dbnode_id}) of the 0024 schema version should be identical to the '
-                f'objpk ({log_entry.metadata["objpk"]}) of the 0023 schema version stored in the metadata.')
-            assert node_type == log_entry.metadata['objname'], (
-                f'The type ({node_type}) of the linked node of the 0024 schema version should be identical to the '
-                f'objname ({log_entry.metadata["objname"]}) of the 0023 schema version stored in the metadata.')
-
-
-def test_dblog_uuid_addition(perform_migrations: Migrator):
+def test_dblog_uuid_addition(perform_migrations: PsqlDostoreMigrator):
     """Test that the UUID column is correctly added to the DbLog table,
     and that the uniqueness constraint is added without problems
     (if the migration arrives until 375c2db70663 then the constraint is added properly).
     """
""" # starting revision - perform_migrations.migrate_down('041a79fc615f') # 041a79fc615f_dblog_cleaning + perform_migrations.migrate_up('sqlalchemy@041a79fc615f') # 041a79fc615f_dblog_cleaning # setup the database DbLog = perform_migrations.get_current_table('db_dblog') # pylint: disable=invalid-name DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name DbUser = perform_migrations.get_current_table('db_dbuser') # pylint: disable=invalid-name with perform_migrations.session() as session: - user = DbUser(email='user@aiida.net') + user = DbUser(email='user@aiida.net', is_superuser=True) session.add(user) session.commit() @@ -377,7 +291,7 @@ def test_dblog_uuid_addition(perform_migrations: Migrator): session.commit() # migrate up - perform_migrations.migrate_up('375c2db70663') # 375c2db70663_dblog_uuid_uniqueness_constraint + perform_migrations.migrate_up('sqlalchemy@375c2db70663') # 375c2db70663_dblog_uuid_uniqueness_constraint # perform some checks DbLog = perform_migrations.get_current_table('db_dblog') # pylint: disable=invalid-name diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_5_data_move_with_node.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_5_data_move_with_node.py similarity index 84% rename from tests/backends/aiida_sqlalchemy/migrations/test_5_data_move_with_node.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_5_data_move_with_node.py index fff389de22..52086a591a 100644 --- a/tests/backends/aiida_sqlalchemy/migrations/test_5_data_move_with_node.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_5_data_move_with_node.py @@ -8,22 +8,22 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests 041a79fc615f -> 6a5c2ea1439d""" -from .conftest import Migrator +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator -def test_data_move_with_node(perform_migrations: Migrator): +def test_data_move_with_node(perform_migrations: PsqlDostoreMigrator): """Test the migration of Data nodes after the data module was moved within the node module. Verify that type string of the Data node was successfully adapted. 
""" # starting revision - perform_migrations.migrate_down('041a79fc615f') # 041a79fc615f_dblog_update + perform_migrations.migrate_up('sqlalchemy@041a79fc615f') # 041a79fc615f_dblog_update # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name DbUser = perform_migrations.get_current_table('db_dbuser') # pylint: disable=invalid-name with perform_migrations.session() as session: - user = DbUser(email='user@aiida.net') + user = DbUser(email='user@aiida.net', is_superuser=True) session.add(user) session.commit() @@ -38,7 +38,7 @@ def test_data_move_with_node(perform_migrations: Migrator): node_data_id = node_data.id # migrate up - perform_migrations.migrate_up('6a5c2ea1439d') # 6a5c2ea1439d_move_data_within_node_module + perform_migrations.migrate_up('sqlalchemy@6a5c2ea1439d') # 6a5c2ea1439d_move_data_within_node_module # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_6_trajectory_data.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_6_trajectory_data.py similarity index 67% rename from tests/backends/aiida_sqlalchemy/migrations/test_6_trajectory_data.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_6_trajectory_data.py index 85dac7dca5..a51c912132 100644 --- a/tests/backends/aiida_sqlalchemy/migrations/test_6_trajectory_data.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_6_trajectory_data.py @@ -11,13 +11,12 @@ import numpy as np import pytest -from aiida.backends.general.migrations import utils +from aiida.backends.sqlalchemy.migrations.utils import utils +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator from aiida.backends.sqlalchemy.utils import flag_modified -from .conftest import Migrator - -def set_node_array(node, name, array): +def set_node_array(node, repo_path, name, array): """Store a new numpy array inside a node. Possibly overwrite the array if it already existed. Internally, it stores a name.npy file in numpy format. @@ -25,7 +24,7 @@ def set_node_array(node, name, array): :param name: The name of the array. :param array: The numpy array to store. """ - utils.store_numpy_array_in_repository(node.uuid, name, array) + utils.store_numpy_array_in_repository(repo_path, node.uuid, name, array) attributes = node.attributes if attributes is None: attributes = {} @@ -34,18 +33,20 @@ def set_node_array(node, name, array): flag_modified(node, 'attributes') -def get_node_array(node, name): +def get_node_array(node, repo_path, name): """Retrieve a numpy array from a node.""" - return utils.load_numpy_array_from_repository(node.uuid, name) + return utils.load_numpy_array_from_repository(repo_path, node.uuid, name) -def test_trajectory_data(perform_migrations: Migrator): +def test_trajectory_data(perform_migrations: PsqlDostoreMigrator): """Test the migration of the symbols from numpy array to attribute for TrajectoryData nodes. Verify that migration of symbols from repository array to attribute works properly. 
""" + repo_path = perform_migrations.profile.repository_path + # starting revision - perform_migrations.migrate_down('37f3d4882837') # 37f3d4882837_make_all_uuid_columns_unique + perform_migrations.migrate_up('sqlalchemy@37f3d4882837') # 37f3d4882837_make_all_uuid_columns_unique # setup the database stepids = np.array([60, 70]) @@ -58,7 +59,7 @@ def test_trajectory_data(perform_migrations: Migrator): DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name DbUser = perform_migrations.get_current_table('db_dbuser') # pylint: disable=invalid-name with perform_migrations.session() as session: - user = DbUser(email='user@aiida.net') + user = DbUser(email='user@aiida.net', is_superuser=True) session.add(user) session.commit() @@ -68,18 +69,18 @@ def test_trajectory_data(perform_migrations: Migrator): symbols = np.array(['H', 'O', 'C']) - set_node_array(node, 'steps', stepids) - set_node_array(node, 'cells', cells) - set_node_array(node, 'symbols', symbols) - set_node_array(node, 'positions', positions) - set_node_array(node, 'times', times) - set_node_array(node, 'velocities', velocities) + set_node_array(node, repo_path, 'steps', stepids) + set_node_array(node, repo_path, 'cells', cells) + set_node_array(node, repo_path, 'symbols', symbols) + set_node_array(node, repo_path, 'positions', positions) + set_node_array(node, repo_path, 'times', times) + set_node_array(node, repo_path, 'velocities', velocities) session.commit() node_uuid = node.uuid # migrate up - perform_migrations.migrate_up('ce56d84bcc35') # ce56d84bcc35_delete_trajectory_symbols_array + perform_migrations.migrate_up('sqlalchemy@ce56d84bcc35') # ce56d84bcc35_delete_trajectory_symbols_array # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -87,7 +88,7 @@ def test_trajectory_data(perform_migrations: Migrator): node = session.query(DbNode).filter(DbNode.uuid == node_uuid).one() assert node.attributes['symbols'] == ['H', 'O', 'C'] - assert get_node_array(node, 'velocities').tolist() == velocities.tolist() - assert get_node_array(node, 'positions').tolist() == positions.tolist() + assert get_node_array(node, repo_path, 'velocities').tolist() == velocities.tolist() + assert get_node_array(node, repo_path, 'positions').tolist() == positions.tolist() with pytest.raises(IOError): - get_node_array(node, 'symbols') + get_node_array(node, repo_path, 'symbols') diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_7_node_prefix_removal.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_7_node_prefix_removal.py similarity index 83% rename from tests/backends/aiida_sqlalchemy/migrations/test_7_node_prefix_removal.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_7_node_prefix_removal.py index 4231e9150a..c9424e47b9 100644 --- a/tests/backends/aiida_sqlalchemy/migrations/test_7_node_prefix_removal.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_7_node_prefix_removal.py @@ -8,22 +8,22 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests ce56d84bcc35 -> 61fc0913fae9""" -from .conftest import Migrator +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator -def test_node_prefix_removal(perform_migrations: Migrator): +def test_node_prefix_removal(perform_migrations: PsqlDostoreMigrator): """Test the migration of Data nodes after the data module was moved within the 
node module. Verify that type string of the Data node was successfully adapted. """ # starting revision - perform_migrations.migrate_down('ce56d84bcc35') # ce56d84bcc35_delete_trajectory_symbols_array + perform_migrations.migrate_up('sqlalchemy@ce56d84bcc35') # ce56d84bcc35_delete_trajectory_symbols_array # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name DbUser = perform_migrations.get_current_table('db_dbuser') # pylint: disable=invalid-name with perform_migrations.session() as session: - user = DbUser(email='user@aiida.net') + user = DbUser(email='user@aiida.net', is_superuser=True) session.add(user) session.commit() @@ -38,7 +38,7 @@ def test_node_prefix_removal(perform_migrations: Migrator): node_data_id = node_data.id # migrate up - perform_migrations.migrate_up('61fc0913fae9') # 61fc0913fae9_remove_node_prefix + perform_migrations.migrate_up('sqlalchemy@61fc0913fae9') # 61fc0913fae9_remove_node_prefix # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_8_parameter_data_to_dict.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_8_parameter_data_to_dict.py similarity index 80% rename from tests/backends/aiida_sqlalchemy/migrations/test_8_parameter_data_to_dict.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_8_parameter_data_to_dict.py index 4e36ffef82..482865a312 100644 --- a/tests/backends/aiida_sqlalchemy/migrations/test_8_parameter_data_to_dict.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_8_parameter_data_to_dict.py @@ -8,22 +8,22 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests 61fc0913fae9 -> d254fdfed416""" -from .conftest import Migrator +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator -def test_parameter_data_to_dict(perform_migrations: Migrator): +def test_parameter_data_to_dict(perform_migrations: PsqlDostoreMigrator): """Test the data migration after `ParameterData` was renamed to `Dict`. Verify that type string of the Data node was successfully adapted. 
""" # starting revision - perform_migrations.migrate_down('61fc0913fae9') # 61fc0913fae9_remove_node_prefix + perform_migrations.migrate_up('sqlalchemy@61fc0913fae9') # 61fc0913fae9_remove_node_prefix # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name DbUser = perform_migrations.get_current_table('db_dbuser') # pylint: disable=invalid-name with perform_migrations.session() as session: - user = DbUser(email='user@aiida.net') + user = DbUser(email='user@aiida.net', is_superuser=True) session.add(user) session.commit() @@ -35,7 +35,7 @@ def test_parameter_data_to_dict(perform_migrations: Migrator): node_id = node.id # migrate up - perform_migrations.migrate_up('d254fdfed416') # d254fdfed416_rename_parameter_data_to_dict + perform_migrations.migrate_up('sqlalchemy@d254fdfed416') # d254fdfed416_rename_parameter_data_to_dict # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_9_legacy_process.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_9_legacy_process.py similarity index 91% rename from tests/backends/aiida_sqlalchemy/migrations/test_9_legacy_process.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_9_legacy_process.py index bc9ec0b44c..64c33118a3 100644 --- a/tests/backends/aiida_sqlalchemy/migrations/test_9_legacy_process.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_9_legacy_process.py @@ -8,18 +8,18 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests for legacy process migrations: 07fac78e6209 -> 118349c10896""" -from .conftest import Migrator +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator -def test_legacy_jobcalcstate_data(perform_migrations: Migrator): +def test_legacy_jobcalcstate_data(perform_migrations: PsqlDostoreMigrator): """Test the migration that performs a data migration of legacy `JobCalcState`. Verify that the `process_state`, `process_status` and `exit_status` are set correctly. """ - from aiida.backends.general.migrations.calc_state import STATE_MAPPING + from aiida.backends.sqlalchemy.migrations.utils.calc_state import STATE_MAPPING # starting revision - perform_migrations.migrate_down('07fac78e6209') + perform_migrations.migrate_up('sqlalchemy@07fac78e6209') # setup the database nodes = {} @@ -40,7 +40,7 @@ def test_legacy_jobcalcstate_data(perform_migrations: Migrator): nodes[state] = node.id # migrate up - perform_migrations.migrate_up('26d561acd560') + perform_migrations.migrate_up('sqlalchemy@26d561acd560') # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -59,13 +59,13 @@ def test_legacy_jobcalcstate_data(perform_migrations: Migrator): assert isinstance(exit_status, int) -def test_reset_hash(perform_migrations: Migrator): +def test_reset_hash(perform_migrations: PsqlDostoreMigrator): """Test the migration that resets the node hash. Verify that only the _aiida_hash extra has been removed. 
""" # starting revision - perform_migrations.migrate_down('26d561acd560') + perform_migrations.migrate_up('sqlalchemy@26d561acd560') # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -89,7 +89,7 @@ def test_reset_hash(perform_migrations: Migrator): node_id = node.id # migrate up - perform_migrations.migrate_up('e797afa09270') + perform_migrations.migrate_up('sqlalchemy@e797afa09270') # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -100,13 +100,13 @@ def test_reset_hash(perform_migrations: Migrator): assert '_aiida_hash' not in extras # The hash extra should have been removed -def test_legacy_process_attribute(perform_migrations: Migrator): +def test_legacy_process_attribute(perform_migrations: PsqlDostoreMigrator): """Test the migration that performs a data migration of legacy process attributes. Verify that the attributes for process node have been deleted and `_sealed` has been changed to `sealed`. """ # starting revision - perform_migrations.migrate_down('e797afa09270') + perform_migrations.migrate_up('sqlalchemy@e797afa09270') # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -166,7 +166,7 @@ def test_legacy_process_attribute(perform_migrations: Migrator): node_data_id = node_data.id # migrate up - perform_migrations.migrate_up('e734dd5e50d7') + perform_migrations.migrate_up('sqlalchemy@e734dd5e50d7') # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -189,13 +189,13 @@ def test_legacy_process_attribute(perform_migrations: Migrator): assert key in node_data.attributes -def test_seal_unsealed_processes(perform_migrations: Migrator): +def test_seal_unsealed_processes(perform_migrations: PsqlDostoreMigrator): """Test the migration that performs a data migration of legacy process attributes. Verify that the attributes for process node have been deleted and `_sealed` has been changed to `sealed`. """ # starting revision - perform_migrations.migrate_down('e734dd5e50d7') + perform_migrations.migrate_up('sqlalchemy@e734dd5e50d7') # setup the database DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -245,7 +245,7 @@ def test_seal_unsealed_processes(perform_migrations: Migrator): node_data_id = node_data.id # migrate up - perform_migrations.migrate_up('7b38a9e783e7') + perform_migrations.migrate_up('sqlalchemy@7b38a9e783e7') # perform some checks DbNode = perform_migrations.get_current_table('db_dbnode') # pylint: disable=invalid-name @@ -263,13 +263,13 @@ def test_seal_unsealed_processes(perform_migrations: Migrator): assert 'sealed' not in node_data.attributes -def test_default_link_label(perform_migrations: Migrator): +def test_default_link_label(perform_migrations: PsqlDostoreMigrator): """Test the migration that performs a data migration of legacy default link labels. Verify that the attributes for process node have been deleted and `_sealed` has been changed to `sealed`. 
""" # starting revision - perform_migrations.migrate_down('91b573400be5') + perform_migrations.migrate_up('sqlalchemy@91b573400be5') # setup the database DbLink = perform_migrations.get_current_table('db_dblink') # pylint: disable=invalid-name @@ -292,7 +292,7 @@ def test_default_link_label(perform_migrations: Migrator): link_id = link.id # migrate up - perform_migrations.migrate_up('118349c10896') + perform_migrations.migrate_up('sqlalchemy@118349c10896') # perform some checks DbLink = perform_migrations.get_current_table('db_dblink') # pylint: disable=invalid-name diff --git a/aiida/backends/djsite/db/migrations/0017_drop_dbcalcstate.py b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_migrate_to_head.py similarity index 56% rename from aiida/backends/djsite/db/migrations/0017_drop_dbcalcstate.py rename to tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_migrate_to_head.py index d8320feb03..f35ec554e8 100644 --- a/aiida/backends/djsite/db/migrations/0017_drop_dbcalcstate.py +++ b/tests/backends/aiida_sqlalchemy/migrations/sqlalchemy_branch/test_migrate_to_head.py @@ -7,21 +7,12 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=invalid-name -"""Database migration.""" -from django.db import migrations +"""Test migrating from the base of the sqlalchemy branch, to the main head.""" +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator -from aiida.backends.djsite.db.migrations import upgrade_schema_version -REVISION = '1.0.17' -DOWN_REVISION = '1.0.16' - - -class Migration(migrations.Migration): - """Database migration.""" - - dependencies = [ - ('db', '0016_code_sub_class_of_data'), - ] - - operations = [migrations.DeleteModel(name='DbCalcState',), upgrade_schema_version(REVISION, DOWN_REVISION)] +def test_migrate(perform_migrations: PsqlDostoreMigrator): + """Test that the migrator can migrate from the base of the sqlalchemy branch, to the main head.""" + perform_migrations.migrate_up('sqlalchemy@e15ef2630a1b') # the base of the sqlalchemy branch + perform_migrations.migrate() + perform_migrations.validate_storage() diff --git a/aiida/manage/database/__init__.py b/tests/backends/aiida_sqlalchemy/migrations/test_alembic_cli.py similarity index 64% rename from aiida/manage/database/__init__.py rename to tests/backends/aiida_sqlalchemy/migrations/test_alembic_cli.py index 9936f125fe..1585817981 100644 --- a/aiida/manage/database/__init__.py +++ b/tests/backends/aiida_sqlalchemy/migrations/test_alembic_cli.py @@ -7,17 +7,15 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -"""Management of the database.""" +"""Basic tests for the alembic_cli module.""" +from click.testing import CliRunner -# AUTO-GENERATED +from aiida.backends.sqlalchemy.alembic_cli import alembic_cli -# yapf: disable -# pylint: disable=wildcard-import -from .integrity import * - -__all__ = ( - 'write_database_integrity_violation', -) - -# yapf: enable +def test_history(): + """Test the 'history' command.""" + runner = CliRunner() + result = runner.invoke(alembic_cli, ['history']) + assert result.exit_code == 0 + assert 'head' in result.output diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_basic.py 
b/tests/backends/aiida_sqlalchemy/migrations/test_all_basic.py deleted file mode 100644 index 6b533a2ab6..0000000000 --- a/tests/backends/aiida_sqlalchemy/migrations/test_all_basic.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Basic tests for all migratios""" -import pytest - - -@pytest.mark.usefixtures('perform_migrations') -def test_all_empty_migrations(): - """Test migrating down to a particular version, then back up, using an empty database. - - Note, migrating down+up with 59edaf8a8b79_adding_indexes_and_constraints_to_the_.py raises:: - - sqlalchemy.exc.ProgrammingError: - (psycopg2.errors.DuplicateTable) relation "db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key" already exists - - So we only run for all versions later than this. - """ - from aiida.backends.sqlalchemy.manager import SqlaBackendManager - migrator = SqlaBackendManager() - all_versions = migrator.list_schema_versions() - first_index = all_versions.index('a514d673c163') + 1 - # ideally we would pytest parametrize this, but then we would need to call list_schema_versions on module load - for version in all_versions[first_index:]: - migrator.migrate_down(version) - assert migrator.get_schema_version_backend() == version - migrator.migrate_up('head') - assert migrator.get_schema_version_backend() == migrator.get_schema_version_head() diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema.py b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema.py new file mode 100644 index 0000000000..a64b755742 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # ########################################################################### +"""Basic tests for all migrations""" +import pytest + +from aiida.backends.sqlalchemy.migrator import PsqlDostoreMigrator + + +@pytest.mark.parametrize('version', list(v for v in PsqlDostoreMigrator.get_schema_versions() if v.startswith('main'))) +def test_main(version, uninitialised_profile, reflect_schema, data_regression): + """Test that the migrations produce the expected database schema.""" + migrator = PsqlDostoreMigrator(uninitialised_profile) + migrator.migrate_up(f'main@{version}') + data_regression.check(reflect_schema(uninitialised_profile)) + + +@pytest.mark.parametrize( + 'version', list(v for v in PsqlDostoreMigrator.get_schema_versions() if v.startswith('django')) +) +def test_django(version, uninitialised_profile, reflect_schema, data_regression): + """Test that the migrations (along the legacy django branch) produce the expected database schema.""" + migrator = PsqlDostoreMigrator(uninitialised_profile) + migrator.migrate_up(f'django@{version}') + data_regression.check(reflect_schema(uninitialised_profile)) + + +@pytest.mark.parametrize( + '_id,version', + enumerate( + v for v in PsqlDostoreMigrator.get_schema_versions() if not (v.startswith('django') or v.startswith('main')) + ) +) +def test_sqla(_id, version, uninitialised_profile, reflect_schema, data_regression): + """Test that the migrations (along the legacy sqlalchemy branch) produce the expected database schema.""" + migrator = PsqlDostoreMigrator(uninitialised_profile) + migrator.migrate_up(f'sqlalchemy@{version}') + data_regression.check(reflect_schema(uninitialised_profile)) + + +def test_head_vs_orm(uninitialised_profile, reflect_schema, data_regression): + """Test that the migrations produce the same database schema as the models.""" + migrator = PsqlDostoreMigrator(uninitialised_profile) + head_version = migrator.get_schema_version_head() + migrator.initialise() + data_regression.check(reflect_schema(uninitialised_profile), basename=f'test_main_{head_version}_')
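The `reflect_schema` fixture used by these regression tests is likewise defined outside this diff. As a minimal sketch only, assuming plain SQLAlchemy runtime inspection and taking a DSN string rather than a profile object, it could look roughly like the following; the real fixture evidently also records constraints, foreign keys and indexes, as the snapshot files below show:

from sqlalchemy import create_engine, inspect


def reflect_schema(dsn: str) -> dict:
    """Return a nested dict of the column layout of every table, for data_regression to snapshot."""
    inspector = inspect(create_engine(dsn))
    return {
        table: {
            column['name']: {'data_type': str(column['type']), 'is_nullable': column['nullable']}
            for column in inspector.get_columns(table)
        }
        for table in sorted(inspector.get_table_names())
    }

diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0001_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0001_.yml new file mode 100644 index 0000000000..375afef92a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0001_.yml @@ -0,0 +1,1116 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: 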
null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + 
data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: false + depth: + data_type: integer + default: null + is_nullable: false + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + 
is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + 
is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblink: + db_dblink_input_id_output_id_fbe99cb5_uniq: + - input_id + - output_id + db_dblink_output_id_label_00bdb9c7_uniq: + - label + - output_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + 
db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_d8228636_fk_db_dbnode_id: FOREIGN KEY (child_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id: FOREIGN KEY (parent_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + 
db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING 
btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_input_id_output_id_fbe99cb5_uniq: CREATE UNIQUE INDEX db_dblink_input_id_output_id_fbe99cb5_uniq + ON 
public.db_dblink USING btree (input_id, output_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_output_id_label_00bdb9c7_uniq: CREATE UNIQUE INDEX db_dblink_output_id_label_00bdb9c7_uniq + ON public.db_dblink USING btree (output_id, label) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbpath: + db_dbpath_child_id_d8228636: CREATE INDEX db_dbpath_child_id_d8228636 ON public.db_dbpath + USING btree (child_id) + db_dbpath_parent_id_3b82d6c8: CREATE INDEX db_dbpath_parent_id_3b82d6c8 ON public.db_dbpath + USING btree (parent_id) + db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree + (id) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX 
db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: 
CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0002_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0002_.yml new file mode 100644 index 0000000000..375afef92a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0002_.yml @@ -0,0 +1,1116 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + 
is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + 
is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: false + depth: + data_type: integer + default: null + is_nullable: false + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time 
zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + 
is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblink: + db_dblink_input_id_output_id_fbe99cb5_uniq: + - input_id + - output_id + db_dblink_output_id_label_00bdb9c7_uniq: + - label + - output_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + 
db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_d8228636_fk_db_dbnode_id: FOREIGN KEY (child_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id: FOREIGN KEY (parent_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING 
btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_input_id_output_id_fbe99cb5_uniq: CREATE UNIQUE INDEX db_dblink_input_id_output_id_fbe99cb5_uniq + ON public.db_dblink USING btree (input_id, output_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) 
+ db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_output_id_label_00bdb9c7_uniq: CREATE UNIQUE INDEX db_dblink_output_id_label_00bdb9c7_uniq + ON public.db_dblink USING btree (output_id, label) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbpath: + db_dbpath_child_id_d8228636: CREATE INDEX db_dbpath_child_id_d8228636 ON public.db_dbpath + USING btree (child_id) + db_dbpath_parent_id_3b82d6c8: CREATE INDEX db_dbpath_parent_id_3b82d6c8 ON public.db_dbpath + USING btree (parent_id) + db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree + (id) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree 
(id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + 
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0003_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0003_.yml
new file mode 100644
index 0000000000..1d3f6d9ba0
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0003_.yml
@@ -0,0 +1,1114 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default:
null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + 
db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: false + depth: + data_type: integer + default: null + is_nullable: false + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + 
default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + 
max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + 
db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_d8228636_fk_db_dbnode_id: FOREIGN KEY (child_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id: FOREIGN KEY (parent_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX 
db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + 
db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbpath: + db_dbpath_child_id_d8228636: CREATE INDEX db_dbpath_child_id_d8228636 ON public.db_dbpath + USING btree (child_id) + db_dbpath_parent_id_3b82d6c8: CREATE INDEX db_dbpath_parent_id_3b82d6c8 ON public.db_dbpath + USING btree (parent_id) + db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree + (id) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + 
db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + 
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0004_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0004_.yml
new file mode 100644
index 0000000000..a94a9035ea
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0004_.yml
@@ -0,0 +1,1121 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+ is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: 
nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: false + depth: + data_type: integer + default: null + is_nullable: false + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + 
data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + 
default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_d8228636_fk_db_dbnode_id: FOREIGN KEY (child_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id: FOREIGN KEY (parent_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON 
public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX 
db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbpath: + db_dbpath_child_id_d8228636: CREATE INDEX db_dbpath_child_id_d8228636 ON public.db_dbpath + USING btree (child_id) + db_dbpath_parent_id_3b82d6c8: CREATE INDEX db_dbpath_parent_id_3b82d6c8 ON public.db_dbpath + USING btree (parent_id) + db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree + (id) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX 
db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + 
db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0005_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0005_.yml
new file mode 100644
index 0000000000..76f3c6cfcd
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0005_.yml
@@ -0,0 +1,1125 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+ is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: 
nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: false + depth: + data_type: integer + default: null + is_nullable: false + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + 
data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + 
default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_d8228636_fk_db_dbnode_id: FOREIGN KEY (child_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbpath_parent_id_3b82d6c8_fk_db_dbnode_id: FOREIGN KEY (parent_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON 
public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX 
db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode
+      USING btree (uuid)
+    db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode
+      USING btree (uuid varchar_pattern_ops)
+  db_dbpath:
+    db_dbpath_child_id_d8228636: CREATE INDEX db_dbpath_child_id_d8228636 ON public.db_dbpath
+      USING btree (child_id)
+    db_dbpath_parent_id_3b82d6c8: CREATE INDEX db_dbpath_parent_id_3b82d6c8 ON public.db_dbpath
+      USING btree (parent_id)
+    db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree
+      (id)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser
+      USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0006_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0006_.yml
new file mode 100644
index 0000000000..597f85a532
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0006_.yml
@@ -0,0 +1,1081 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblock:
+    creation:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    owner:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    timeout:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dblog:
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    objname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    objpk:
+      data_type: integer
+      default: null
+      is_nullable: true
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 75
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcalcstate:
+      db_dbcalcstate_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblock:
+      db_dblock_pkey:
+      - key
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcalcstate:
+      db_dbcalcstate_dbnode_id_state_b4a14db3_uniq:
+      - dbnode_id
+      - state
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - name
+      - type
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_key:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c
+      ON public.db_dbcalcstate USING btree (dbnode_id)
+    db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq
+      ON public.db_dbcalcstate USING btree (dbnode_id, state)
+    db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate
+      USING btree (id)
+    db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate
+      USING btree (state)
+    db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like
+      ON public.db_dbcalcstate USING btree (state varchar_pattern_ops)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (name)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (name varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (name, type)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblock:
+    db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock
+      USING btree (key varchar_pattern_ops)
+    db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree
+      (key)
+  db_dblog:
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode
+      USING btree (uuid)
+    db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode
+      USING btree (uuid varchar_pattern_ops)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser
+      USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0007_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0007_.yml
new file mode 100644
index 0000000000..597f85a532
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0007_.yml
@@ -0,0 +1,1081 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblock:
+    creation:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    owner:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    timeout:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dblog:
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    objname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    objpk:
+      data_type: integer
+      default: null
+      is_nullable: true
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 75
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcalcstate:
+      db_dbcalcstate_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblock:
+      db_dblock_pkey:
+      - key
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcalcstate:
+      db_dbcalcstate_dbnode_id_state_b4a14db3_uniq:
+      - dbnode_id
+      - state
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - name
+      - type
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_key:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c
+      ON public.db_dbcalcstate USING btree (dbnode_id)
+    db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq
+      ON public.db_dbcalcstate USING btree (dbnode_id, state)
+    db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate
+      USING btree (id)
+    db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate
+      USING btree (state)
+    db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like
+      ON public.db_dbcalcstate USING btree (state varchar_pattern_ops)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (name)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (name varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (name, type)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblock:
+    db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock
+      USING btree (key varchar_pattern_ops)
+    db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree
+      (key)
+  db_dblog:
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + 
public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0008_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0008_.yml new file mode 100644 index 0000000000..597f85a532 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0008_.yml @@ -0,0 +1,1081 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null 
+ is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: 
+ description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with 
time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: 
nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + 
db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + 
db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX 
db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX 
db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON 
public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq 
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0009_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0009_.yml
new file mode 100644
index 0000000000..597f85a532
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0009_.yml
@@ -0,0 +1,1081 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable:
false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + 
is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying 
+ default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + 
data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE 
INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY 
(dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX 
db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + 
db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX 
db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + 
db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0010_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0010_.yml
new file mode 100644
index 0000000000..72b6e4a401
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0010_.yml
@@ -0,0 +1,1090 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + 
is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + 
data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + 
- id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY 
DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + 
db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + 
db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + 
db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + 
db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + 
db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0011_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0011_.yml
new file mode 100644
index 0000000000..72b6e4a401
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0011_.yml
@@ -0,0 +1,1090 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type:
text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: false + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + 
is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: 
integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + 
db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + 
db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + 
db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + 
db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblock: + db_dblock_key_048c6767_like: CREATE INDEX db_dblock_key_048c6767_like ON public.db_dblock + USING btree (key varchar_pattern_ops) + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) 
+ db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON 
public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0012_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0012_.yml new file mode 100644 index 
0000000000..abf34e42f9 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0012_.yml @@ -0,0 +1,1063 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + 
data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + 
is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 75 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: false + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + 
data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - 
key + db_dbuser: + db_dbuser_email_key: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE 
INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + 
db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + 
db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype 
varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_key: CREATE UNIQUE INDEX db_dbuser_email_key ON public.db_dbuser + USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + 
db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
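
The two fixture files added below, test_django_django_0013_.yml and test_django_django_0014_.yml, snapshot the full PostgreSQL schema (columns, constraints, foreign keys and index DDL) as it stands after the corresponding historical Django migration, so the migration tests can check the schema produced at each revision. As a minimal sketch of how such a snapshot can be generated, assuming SQLAlchemy and a reachable test database (the helper name and connection URL are illustrative, not code from this repository):

import yaml
from sqlalchemy import create_engine, inspect, text

def snapshot_schema(url):
    """Dump a rough schema snapshot (columns plus index DDL) as YAML text."""
    engine = create_engine(url)
    inspector = inspect(engine)
    snapshot = {'columns': {}, 'indexes': {}}
    with engine.connect() as connection:
        for table in sorted(inspector.get_table_names(schema='public')):
            snapshot['columns'][table] = {
                column['name']: {
                    # str(column['type']) yields SQLAlchemy's type name, not the
                    # information_schema spelling the real fixtures use.
                    'data_type': str(column['type']),
                    'default': column['default'],
                    'is_nullable': column['nullable'],
                }
                for column in inspector.get_columns(table, schema='public')
            }
            # pg_indexes exposes the complete CREATE [UNIQUE] INDEX statement,
            # which is the form the fixtures store verbatim.
            rows = connection.execute(
                text("SELECT indexname, indexdef FROM pg_indexes "
                     "WHERE schemaname = 'public' AND tablename = :table"),
                {'table': table},
            )
            snapshot['indexes'][table] = {name: indexdef for name, indexdef in rows}
    return yaml.safe_dump(snapshot, default_flow_style=False)

print(snapshot_schema('postgresql://postgres@localhost:5432/test_aiida'))

Constraints and foreign keys can be collected the same way through inspector.get_pk_constraint(), inspector.get_unique_constraints() and inspector.get_foreign_keys().
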
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0013_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0013_.yml
new file mode 100644
index 0000000000..36856e06fd
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0013_.yml
@@ -0,0 +1,1056 @@
+columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default:
nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default:
nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + 
default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + 
data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + 
REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra 
USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: 
CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98: CREATE INDEX db_dbnode_uuid_62e0bf98 ON public.db_dbnode + USING btree (uuid) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + 
db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree 
(dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
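
The 0014 snapshot that follows is nearly identical to 0013; the visible delta is that migration 0014 enforces uniqueness of db_dbnode.uuid: the plain index db_dbnode_uuid_62e0bf98 from 0013 is replaced by the unique index db_dbnode_uuid_62e0bf98_uniq, which also appears under the unique constraints for db_dbnode (hence the file growing from 1056 to 1059 lines). A quick way to see that delta, assuming the two fixture files are on disk (the loader below is illustrative, not a helper from this repository):

import yaml

def load_indexes(path, table):
    """Illustrative loader: one table's index DDL from a snapshot fixture."""
    with open(path) as handle:
        return yaml.safe_load(handle)['indexes'][table]

before = load_indexes('test_django_django_0013_.yml', 'db_dbnode')
after = load_indexes('test_django_django_0014_.yml', 'db_dbnode')

print(sorted(set(before) - set(after)))  # ['db_dbnode_uuid_62e0bf98']
print(sorted(set(after) - set(before)))  # ['db_dbnode_uuid_62e0bf98_uniq']
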
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0014_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0014_.yml
new file mode 100644
index 0000000000..955d97cee8
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0014_.yml
@@ -0,0 +1,1059 @@
+columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 25 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable:
false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false +
label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + 
module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + 
db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: 
FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX 
db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON 
+      public.db_dbgroup
+      USING btree (user_id)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON
+      public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode
+      USING btree (uuid varchar_pattern_ops)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX
+      db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0015_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0015_.yml
new file mode 100644
index 0000000000..955d97cee8
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0015_.yml
@@ -0,0 +1,1059 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length:
+        255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    objname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    objpk:
+      data_type: integer
+      default: null
+      is_nullable: true
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcalcstate:
+      db_dbcalcstate_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcalcstate:
+      db_dbcalcstate_dbnode_id_state_b4a14db3_uniq:
+      - dbnode_id
+      - state
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - name
+      - type
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c
+      ON public.db_dbcalcstate USING btree (dbnode_id)
+    db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq
+      ON public.db_dbcalcstate USING btree (dbnode_id, state)
+    db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate
+      USING btree (id)
+    db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate
+      USING btree (state)
+    db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like
+      ON public.db_dbcalcstate USING btree (state varchar_pattern_ops)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (name)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (name varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (name, type)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON
+      public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode
+      USING btree (uuid varchar_pattern_ops)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0016_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0016_.yml
new file mode 100644
index 0000000000..955d97cee8
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0016_.yml
@@ -0,0 +1,1059 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 25
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    objname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    objpk:
+      data_type: integer
+      default: null
+      is_nullable: true
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 36
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcalcstate:
+      db_dbcalcstate_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcalcstate:
+      db_dbcalcstate_dbnode_id_state_b4a14db3_uniq:
+      - dbnode_id
+      - state
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - name
+      - type
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_f217a84c_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX 
db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_f217a84c: CREATE INDEX db_dbcalcstate_dbnode_id_f217a84c + ON public.db_dbcalcstate USING btree (dbnode_id) + db_dbcalcstate_dbnode_id_state_b4a14db3_uniq: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_b4a14db3_uniq + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + db_dbcalcstate_state_0bf54584: CREATE INDEX db_dbcalcstate_state_0bf54584 ON public.db_dbcalcstate + USING btree (state) + db_dbcalcstate_state_0bf54584_like: CREATE INDEX db_dbcalcstate_state_0bf54584_like + ON public.db_dbcalcstate USING btree (state varchar_pattern_ops) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like 
ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like 
ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + 
db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0017_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0017_.yml
new file mode 100644
index 0000000000..cb41ab47ed
--- /dev/null
+++
b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0017_.yml @@ -0,0 +1,1020 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + 
data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + 
is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: character varying + default: null + is_nullable: false + max_length: 36 + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + 
is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + 
db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + 
db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON 
public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE 
INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_like: CREATE INDEX db_dbnode_uuid_62e0bf98_like ON public.db_dbnode + USING btree (uuid varchar_pattern_ops) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq 
ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 
+ ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0018_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0018_.yml
new file mode 100644
index 0000000000..b3e1e522b4
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0018_.yml
@@ -0,0 +1,1038 @@
+columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character
varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null 
+ is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + 
default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: 
+ db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON 
public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree 
(label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX 
db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + 
db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0019_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0019_.yml
new file mode 100644
index 0000000000..b3e1e522b4
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0019_.yml
@@ -0,0 +1,1038 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default:
null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + 
is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + 
is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + 
db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: 
CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + 
db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + 
db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX 
db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq
+      ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0020_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0020_.yml
new file mode 100644
index 0000000000..b3e1e522b4
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0020_.yml
@@ -0,0 +1,1038 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length:
1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + 
data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: 
null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - name + - type + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - 
uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY 
(parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) 
+ db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (name) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (name varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX 
db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + 
db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + 
db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0021_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0021_.yml new file mode 100644 index 0000000000..6603dc9f88 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0021_.yml @@ -0,0 +1,1038 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: 
+ data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: 
+ ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: 
nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + 
db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) 
REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 
'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + 
db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + 
db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + 
db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0022_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0022_.yml
new file mode 100644
index 0000000000..6603dc9f88
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0022_.yml
@@ -0,0 +1,1038 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+ default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null 
+ is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false 
+ max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null 
+ is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + 
db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY 
DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree 
(dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_objname_69932b1e: CREATE 
INDEX db_dblog_objname_69932b1e ON public.db_dblog + USING btree (objname) + db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON + public.db_dblog USING btree (objname varchar_pattern_ops) + db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog + USING btree (objpk) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX 
db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + 
db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0023_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0023_.yml
new file mode 100644
index 0000000000..6603dc9f88
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0023_.yml
@@ -0,0 +1,1038 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default:
null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + objname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + 
default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: 
text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: 
+ - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN 
KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_objname_69932b1e: CREATE INDEX db_dblog_objname_69932b1e ON public.db_dblog
+      USING btree (objname)
+    db_dblog_objname_69932b1e_like: CREATE INDEX db_dblog_objname_69932b1e_like ON
+      public.db_dblog USING btree (objname varchar_pattern_ops)
+    db_dblog_objpk_fc47afa9: CREATE INDEX db_dblog_objpk_fc47afa9 ON public.db_dblog
+      USING btree (objpk)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq
+      ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024_.yml
new file mode 100644
index 0000000000..3e6143e5bc
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflow:
+      db_dbworkflow_uuid_08947ee2_uniq:
+      - uuid
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520
+      ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq
+      ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e
+      ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups
+      USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE
+      INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions
+      USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee
+      ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54
+      ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey
+      ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow
+      USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like
+      ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq
+      ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024a_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024a_.yml
new file mode 100644
index 0000000000..7036f45d60
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0024a_.yml
@@ -0,0 +1,1036 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflow:
+      db_dbworkflow_uuid_08947ee2_uniq:
+      - uuid
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES
+      auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id)
+      REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04
+      ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like
+      ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153
+      ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq
+      ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute
+      USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like
+      ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute
+      USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute
+      USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text,
+      'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX
db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + 
db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON 
public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0025_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0025_.yml
new file mode 100644
index 0000000000..3e6143e5bc
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0025_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type:
integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + 
max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + 
is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + 
db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED 
+ db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX 
db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + 
db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + 
db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + 
db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON
+      public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq
+      ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0026_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0026_.yml
new file mode 100644
index 0000000000..3e6143e5bc
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0026_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+
data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with 
time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: 
false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + 
db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: 
FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + 
USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + 
db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX 
db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX 
db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0027_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0027_.yml
new file mode 100644
index 0000000000..3e6143e5bc
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0027_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+
enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + 
dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character 
varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + 
db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE 
INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON 
public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + 
db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + 
db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + 
db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d
+      ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata
+      USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9
+      ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431
+      ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations
+      USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7
+      ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE
+      INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows
+      USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0028_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0028_.yml
new file mode 100644
index 0000000000..3e6143e5bc
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0028_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+ default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null 
+ is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + 
data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + 
state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + 
db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra 
USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: 
CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + 
db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON 
public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
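
The fixture added next, test_django_django_0029_.yml, snapshots the complete PostgreSQL schema — columns, primary-key and unique constraints, foreign keys, and verbatim index definitions — as it stands after the corresponding Django migration, which is what lets the migration tests detect schema drift per revision. As a rough illustration of how a snapshot in this layout can be produced — a minimal sketch against a local test_aiida database; the helper name, queries, and connection details are assumptions, not the actual aiida-core tooling:

```python
# Minimal sketch, NOT the actual aiida-core fixture generator: dump a live
# PostgreSQL database into the `columns` / `indexes` layout used by these
# YAML snapshots. Database name and credentials are illustrative assumptions.
import psycopg2
import yaml


def snapshot_schema(dbname='test_aiida', user='postgres', host='localhost', port=5432):
    conn = psycopg2.connect(dbname=dbname, user=user, host=host, port=port)
    with conn, conn.cursor() as cur:
        # Column name -> data_type / default / is_nullable (plus max_length for
        # varchars), i.e. the fields stored under the fixtures' `columns` key.
        cur.execute(
            'SELECT table_name, column_name, data_type, column_default, is_nullable, '
            'character_maximum_length FROM information_schema.columns '
            "WHERE table_schema = 'public' ORDER BY table_name, column_name"
        )
        columns = {}
        for table, column, data_type, default, nullable, max_length in cur.fetchall():
            entry = {'data_type': data_type, 'default': default, 'is_nullable': nullable == 'YES'}
            if max_length is not None:
                entry['max_length'] = max_length
            columns.setdefault(table, {})[column] = entry
        # Index name -> verbatim CREATE INDEX statement, matching the fixtures'
        # `indexes` section; pg_indexes stores the full definition directly.
        cur.execute("SELECT tablename, indexname, indexdef FROM pg_indexes WHERE schemaname = 'public'")
        indexes = {}
        for table, name, definition in cur.fetchall():
            indexes.setdefault(table, {})[name] = definition
    return yaml.safe_dump({'columns': columns, 'indexes': indexes}, default_flow_style=False)


if __name__ == '__main__':
    print(snapshot_schema())
```

The constraints and foreign_keys sections would be gathered analogously from information_schema.table_constraints and the pg_catalog tables; they are omitted here for brevity.

diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0029_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0029_.yml new file mode 100644 index 0000000000..3e6143e5bc --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0029_.yml @@ -0,0 +1,1041 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + 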
scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + 
nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: false + module: + data_type: text + default: null + is_nullable: false + module_class: + data_type: text + default: null + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + nodeversion: + data_type: integer + default: null + 
is_nullable: false + report: + data_type: text + default: null + is_nullable: false + script_md5: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + value_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: false + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + 
db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id + db_dbworkflow: + db_dbworkflow_uuid_08947ee2_uniq: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_111027e3_uniq: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + 
DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_calculations: + db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED + db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq 
+ ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON 
+      public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode USING btree (type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode USING btree (type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0030_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0030_.yml
new file mode 100644
index 0000000000..9114bf3b66
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0030_.yml
@@ -0,0 +1,1041 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflow:
+      db_dbworkflow_uuid_08947ee2_uniq:
+      - uuid
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, 'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree (id)
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 ON public.db_dbuser_groups USING btree (dbuser_id)
+    db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq ON public.db_dbuser_groups USING btree (dbuser_id, group_id)
+    db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e ON public.db_dbuser_groups USING btree (group_id)
+    db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups USING btree (id)
+  db_dbuser_user_permissions:
+    db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions USING btree (dbuser_id, permission_id)
+    db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee ON public.db_dbuser_user_permissions USING btree (dbuser_id)
+    db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 ON public.db_dbuser_user_permissions USING btree (permission_id)
+    db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey ON public.db_dbuser_user_permissions USING btree (id)
+  db_dbworkflow:
+    db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow USING btree (label)
+    db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like ON public.db_dbworkflow USING btree (label varchar_pattern_ops)
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow USING btree (id)
+    db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON public.db_dbworkflow USING btree (user_id)
+    db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq ON public.db_dbworkflow USING btree (uuid)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d ON public.db_dbworkflowdata USING btree (parent_id)
+    db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 ON public.db_dbworkflowstep USING btree (parent_id)
+    db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_111027e3_uniq ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep USING btree (id)
+    db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 ON public.db_dbworkflowstep USING btree (user_id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 ON public.db_dbworkflowstep_calculations USING btree (dbnode_id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0031_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0031_.yml
new file mode 100644
index 0000000000..cb486fd22f
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0031_.yml
@@ -0,0 +1,1037 @@
+columns:
+  db_dbattribute:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbattribute_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_params:
+      data_type: text
+      default: null
+      is_nullable: false
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbextra:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbextra_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: text
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    public:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    bval:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    datatype:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 10
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    dval:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    fval:
+      data_type: double precision
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    ival:
+      data_type: integer
+      default: null
+      is_nullable: true
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    tval:
+      data_type: text
+      default: null
+      is_nullable: false
+  db_dbuser:
+    date_joined:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    is_active:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_staff:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    is_superuser:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    last_login:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    password:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 128
+  db_dbuser_groups:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    group_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_groups_id_seq'::regclass)
+      is_nullable: false
+  db_dbuser_user_permissions:
+    dbuser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_user_permissions_id_seq'::regclass)
+      is_nullable: false
+    permission_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflow:
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflow_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    lastsyncedversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    module:
+      data_type: text
+      default: null
+      is_nullable: false
+    module_class:
+      data_type: text
+      default: null
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    nodeversion:
+      data_type: integer
+      default: null
+      is_nullable: false
+    report:
+      data_type: text
+      default: null
+      is_nullable: false
+    script_md5:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    script_path:
+      data_type: text
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbworkflowdata:
+    aiida_obj_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    data_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowdata_id_seq'::regclass)
+      is_nullable: false
+    json_value:
+      data_type: text
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    value_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dbworkflowstep:
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    nextcall:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    parent_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+  db_dbworkflowstep_calculations:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass)
+      is_nullable: false
+  db_dbworkflowstep_sub_workflows:
+    dbworkflow_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbworkflowstep_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass)
+      is_nullable: false
+constraints:
+  primary_key:
+    db_dbattribute:
+      db_dbattribute_pkey:
+      - id
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbextra:
+      db_dbextra_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+    db_dbuser_groups:
+      db_dbuser_groups_pkey:
+      - id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissions_pkey:
+      - id
+    db_dbworkflow:
+      db_dbworkflow_pkey:
+      - id
+    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbattribute:
+      db_dbattribute_dbnode_id_key_c589e447_uniq:
+      - dbnode_id
+      - key
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbextra:
+      db_dbextra_dbnode_id_key_aa56fd37_uniq:
+      - dbnode_id
+      - key
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+    db_dbuser_groups:
+      db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq:
+      - dbuser_id
+      - group_id
+    db_dbuser_user_permissions:
+      db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq:
+      - dbuser_id
+      - permission_id
+    db_dbworkflow:
+      db_dbworkflow_uuid_08947ee2_uniq:
+      - uuid
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_111027e3_uniq:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbattribute:
+    db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbextra:
+    db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_groups:
+    db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES auth_group(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbuser_user_permissions:
+    db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_ef1f3251_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_70a2d33b_fk_db_dbnode_id: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowdata_parent_id_ff4dbf8d_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_ffb754d9_fk_db_dbworkflow_id: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_user_id_04282431_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_ca_dbnode_id_0d07b7a7_fk_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_ca_dbworkflowstep_id_575c3637_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_su_dbworkflow_id_dca4d103_fk_db_dbwork: FOREIGN KEY (dbworkflow_id) REFERENCES db_dbworkflow(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbworkflowstep_su_dbworkflowstep_id_e183bbb7_fk_db_dbwork: FOREIGN KEY (dbworkflowstep_id) REFERENCES db_dbworkflowstep(id) DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbattribute:
+    db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 ON public.db_dbattribute USING btree (datatype)
+    db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like ON public.db_dbattribute USING btree (datatype varchar_pattern_ops)
+    db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 ON public.db_dbattribute USING btree (dbnode_id)
+    db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq ON public.db_dbattribute USING btree (dbnode_id, key)
+    db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute USING btree (key)
+    db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like ON public.db_dbattribute USING btree (key varchar_pattern_ops)
+    db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute USING btree (id)
+    tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, 'TOSUBMIT'::text]))
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink USING btree (input_id)
db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting 
USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) + db_dbworkflow: + db_dbworkflow_label_7368f34a: CREATE INDEX db_dbworkflow_label_7368f34a ON public.db_dbworkflow + USING btree (label) + db_dbworkflow_label_7368f34a_like: CREATE INDEX db_dbworkflow_label_7368f34a_like + ON public.db_dbworkflow USING btree (label varchar_pattern_ops) + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_user_id_ef1f3251: CREATE INDEX db_dbworkflow_user_id_ef1f3251 ON + public.db_dbworkflow USING btree (user_id) + db_dbworkflow_uuid_08947ee2_uniq: CREATE UNIQUE INDEX db_dbworkflow_uuid_08947ee2_uniq + ON public.db_dbworkflow USING btree (uuid) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_70a2d33b: CREATE INDEX db_dbworkflowdata_aiida_obj_id_70a2d33b + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + db_dbworkflowdata_parent_id_ff4dbf8d: CREATE INDEX db_dbworkflowdata_parent_id_ff4dbf8d + ON public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq: CREATE UNIQUE INDEX + db_dbworkflowdata_parent_id_name_data_type_a4b50dae_uniq ON public.db_dbworkflowdata + USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_ffb754d9: CREATE INDEX db_dbworkflowstep_parent_id_ffb754d9 + ON public.db_dbworkflowstep USING btree (parent_id) + db_dbworkflowstep_parent_id_name_111027e3_uniq: CREATE UNIQUE INDEX 
db_dbworkflowstep_parent_id_name_111027e3_uniq + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_user_id_04282431: CREATE INDEX db_dbworkflowstep_user_id_04282431 + ON public.db_dbworkflowstep USING btree (user_id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_calcul_dbworkflowstep_id_dbnode_60f50d02_uniq ON public.db_dbworkflowstep_calculations + USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_dbnode_id_0d07b7a7: CREATE INDEX db_dbworkflowstep_calculations_dbnode_id_0d07b7a7 + ON public.db_dbworkflowstep_calculations USING btree (dbnode_id) + db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637: CREATE INDEX db_dbworkflowstep_calculations_dbworkflowstep_id_575c3637 + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq: CREATE UNIQUE + INDEX db_dbworkflowstep_sub_wo_dbworkflowstep_id_dbwork_e9b2b624_uniq ON public.db_dbworkflowstep_sub_workflows + USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflow_id_dca4d103 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflow_id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7: CREATE INDEX db_dbworkflowstep_sub_workflows_dbworkflowstep_id_e183bbb7 + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0032_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0032_.yml new file mode 100644 index 0000000000..ab8a3e2e02 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0032_.yml @@ -0,0 +1,771 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: text + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + 
data_type: text + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: text + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: 
nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: text + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: 
nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY 
(input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + 
db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: 
CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING 
btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0033_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0033_.yml new file mode 100644 index 0000000000..0f021dca3f --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0033_.yml @@ -0,0 +1,771 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: 
timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: 
timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: false + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + 
db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: + db_dbuser_user_permissions_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + 
REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX 
db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + 
USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + 
db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0034_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0034_.yml new file mode 100644 index 0000000000..d97e601769 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0034_.yml @@ -0,0 +1,763 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: 
nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: 
nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: false + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: false + is_staff: + data_type: boolean + default: null + is_nullable: false + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: false + max_length: 128 + db_dbuser_groups: + dbuser_id: + data_type: integer + default: null + is_nullable: false + group_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_groups_id_seq'::regclass) + is_nullable: false + db_dbuser_user_permissions: + dbuser_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbuser_user_permissions_id_seq'::regclass) + is_nullable: false + permission_id: + data_type: integer + default: null + is_nullable: false +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbuser_groups: + db_dbuser_groups_pkey: + - id + db_dbuser_user_permissions: 
+ db_dbuser_user_permissions_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email + db_dbuser_groups: + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: + - dbuser_id + - group_id + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: + - dbuser_id + - permission_id +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_groups_group_id_8478d87e_fk_auth_group_id: FOREIGN KEY (group_id) REFERENCES + auth_group(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions: + 
db_dbuser_user_permi_permission_id_c5aafc54_fk_auth_perm: FOREIGN KEY (permission_id) + REFERENCES auth_permission(id) DEFERRABLE INITIALLY DEFERRED + db_dbuser_user_permissions_dbuser_id_364456ee_fk_db_dbuser_id: FOREIGN KEY (dbuser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: 
CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree 
(loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + db_dbuser_groups: + db_dbuser_groups_dbuser_id_480b3520: CREATE INDEX db_dbuser_groups_dbuser_id_480b3520 + ON public.db_dbuser_groups USING btree (dbuser_id) + db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq: CREATE UNIQUE INDEX db_dbuser_groups_dbuser_id_group_id_9155eb4f_uniq + ON public.db_dbuser_groups USING btree (dbuser_id, group_id) + db_dbuser_groups_group_id_8478d87e: CREATE INDEX db_dbuser_groups_group_id_8478d87e + ON public.db_dbuser_groups USING btree (group_id) + db_dbuser_groups_pkey: CREATE UNIQUE INDEX db_dbuser_groups_pkey ON public.db_dbuser_groups + USING btree (id) + db_dbuser_user_permissions: + db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq: 
CREATE UNIQUE + INDEX db_dbuser_user_permissio_dbuser_id_permission_id_e6cbabe4_uniq ON public.db_dbuser_user_permissions + USING btree (dbuser_id, permission_id) + db_dbuser_user_permissions_dbuser_id_364456ee: CREATE INDEX db_dbuser_user_permissions_dbuser_id_364456ee + ON public.db_dbuser_user_permissions USING btree (dbuser_id) + db_dbuser_user_permissions_permission_id_c5aafc54: CREATE INDEX db_dbuser_user_permissions_permission_id_c5aafc54 + ON public.db_dbuser_user_permissions USING btree (permission_id) + db_dbuser_user_permissions_pkey: CREATE UNIQUE INDEX db_dbuser_user_permissions_pkey + ON public.db_dbuser_user_permissions USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0035_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0035_.yml new file mode 100644 index 0000000000..f5ca03981c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0035_.yml @@ -0,0 +1,669 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: false + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid 
+ default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + 
is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + 
db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + 
db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbextra: + db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra + USING btree (datatype) + db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like + ON public.db_dbextra USING btree (datatype varchar_pattern_ops) + db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra + USING btree (dbnode_id) + db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq + ON public.db_dbextra USING btree (dbnode_id, key) + db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra + USING btree (key) + db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra + USING btree (key varchar_pattern_ops) + db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING + btree (id) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + 
db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON + public.db_dbsetting USING btree (datatype) + db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like + ON public.db_dbsetting USING btree (datatype varchar_pattern_ops) + db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting + USING btree (key) + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON 
public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0036_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0036_.yml new file mode 100644 index 0000000000..9310c3564d --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0036_.yml @@ -0,0 +1,665 @@ +columns: + db_dbattribute: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbattribute_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbextra: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + dbnode_id: + data_type: integer + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: 
integer + default: nextval('db_dbextra_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + tval: + data_type: text + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + bval: + data_type: boolean + default: null + is_nullable: true + datatype: + data_type: character varying + default: null + is_nullable: false + max_length: 10 + description: + data_type: text + default: null + is_nullable: false + dval: + data_type: timestamp with time zone + default: null + is_nullable: true + fval: + data_type: double precision + default: null + is_nullable: true + id: + data_type: integer + default: 
nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + ival: + data_type: integer + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + tval: + data_type: text + default: null + is_nullable: false + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbattribute: + db_dbattribute_pkey: + - id + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbextra: + db_dbextra_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbattribute: + db_dbattribute_dbnode_id_key_c589e447_uniq: + - dbnode_id + - key + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbextra: + db_dbextra_dbnode_id_key_aa56fd37_uniq: + - dbnode_id + - key + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbattribute: + db_dbattribute_dbnode_id_253bf153_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbextra: + db_dbextra_dbnode_id_c7fe8961_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + 
db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbattribute: + db_dbattribute_datatype_91c4dc04: CREATE INDEX db_dbattribute_datatype_91c4dc04 + ON public.db_dbattribute USING btree (datatype) + db_dbattribute_datatype_91c4dc04_like: CREATE INDEX db_dbattribute_datatype_91c4dc04_like + ON public.db_dbattribute USING btree (datatype varchar_pattern_ops) + db_dbattribute_dbnode_id_253bf153: CREATE INDEX db_dbattribute_dbnode_id_253bf153 + ON public.db_dbattribute USING btree (dbnode_id) + db_dbattribute_dbnode_id_key_c589e447_uniq: CREATE UNIQUE INDEX db_dbattribute_dbnode_id_key_c589e447_uniq + ON public.db_dbattribute USING btree (dbnode_id, key) + db_dbattribute_key_ac2bc4e4: CREATE INDEX db_dbattribute_key_ac2bc4e4 ON public.db_dbattribute + USING btree (key) + db_dbattribute_key_ac2bc4e4_like: CREATE INDEX db_dbattribute_key_ac2bc4e4_like + ON public.db_dbattribute USING btree (key varchar_pattern_ops) + db_dbattribute_pkey: CREATE UNIQUE INDEX db_dbattribute_pkey ON public.db_dbattribute + USING btree (id) + tval_idx_for_daemon: CREATE INDEX tval_idx_for_daemon ON public.db_dbattribute + USING btree (tval) WHERE (tval = ANY (ARRAY['COMPUTED'::text, 'WITHSCHEDULER'::text, + 'TOSUBMIT'::text])) + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE 
INDEX db_dbcomputer_uuid_f35defa6_uniq
+    ON public.db_dbcomputer USING btree (uuid)
+  db_dbextra:
+    db_dbextra_datatype_2eba38c6: CREATE INDEX db_dbextra_datatype_2eba38c6 ON public.db_dbextra
+      USING btree (datatype)
+    db_dbextra_datatype_2eba38c6_like: CREATE INDEX db_dbextra_datatype_2eba38c6_like
+      ON public.db_dbextra USING btree (datatype varchar_pattern_ops)
+    db_dbextra_dbnode_id_c7fe8961: CREATE INDEX db_dbextra_dbnode_id_c7fe8961 ON public.db_dbextra
+      USING btree (dbnode_id)
+    db_dbextra_dbnode_id_key_aa56fd37_uniq: CREATE UNIQUE INDEX db_dbextra_dbnode_id_key_aa56fd37_uniq
+      ON public.db_dbextra USING btree (dbnode_id, key)
+    db_dbextra_key_b1a8abc6: CREATE INDEX db_dbextra_key_b1a8abc6 ON public.db_dbextra
+      USING btree (key)
+    db_dbextra_key_b1a8abc6_like: CREATE INDEX db_dbextra_key_b1a8abc6_like ON public.db_dbextra
+      USING btree (key varchar_pattern_ops)
+    db_dbextra_pkey: CREATE UNIQUE INDEX db_dbextra_pkey ON public.db_dbextra USING
+      btree (id)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_datatype_49f4397c: CREATE INDEX db_dbsetting_datatype_49f4397c ON
+      public.db_dbsetting USING btree (datatype)
+    db_dbsetting_datatype_49f4397c_like: CREATE INDEX db_dbsetting_datatype_49f4397c_like
+      ON public.db_dbsetting USING btree (datatype varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4: CREATE INDEX db_dbsetting_key_1b84beb4 ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0037_.yml
b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0037_.yml new file mode 100644 index 0000000000..0dd8d3809a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0037_.yml @@ -0,0 +1,515 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + 
default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + 
db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON 
public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX 
db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0038_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0038_.yml new file mode 100644 index 0000000000..0dd8d3809a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0038_.yml @@ -0,0 +1,515 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: 
null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character 
varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN 
KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON 
public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE 
UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0039_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0039_.yml new file mode 100644 index 0000000000..0dd8d3809a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0039_.yml @@ -0,0 +1,515 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + 
default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: 
+ - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX 
db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + 
db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0040_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0040_.yml new file mode 100644 index 0000000000..0dd8d3809a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0040_.yml @@ -0,0 +1,515 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: 
text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    attributes:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    val:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbuser:
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+constraints:
+  primary_key:
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+foreign_keys:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0041_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0041_.yml
new file mode 100644
index 0000000000..0dd8d3809a
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0041_.yml
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0042_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0042_.yml
new file mode 100644
index 0000000000..0dd8d3809a
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0042_.yml
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0043_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0043_.yml
new file mode 100644
index 0000000000..0dd8d3809a
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0043_.yml
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0044_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0044_.yml
new file mode 100644
index 0000000000..0dd8d3809a
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0044_.yml
INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX 
db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0045_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0045_.yml new file mode 100644 index 0000000000..65345de14f --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0045_.yml @@ -0,0 +1,519 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + 
time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: 
null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + 
USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE 
INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0046_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0046_.yml new file mode 100644 index 0000000000..da230b8a6c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0046_.yml @@ -0,0 +1,523 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + 
default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + 
default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES 
db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb + ON public.db_dbauthinfo USING btree (aiidauser_id) + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 + ON public.db_dbauthinfo USING btree (dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b + ON public.db_dbcomment USING btree (dbnode_id) + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment + USING btree (user_id) + db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq + ON public.db_dbcomment USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like + ON public.db_dbcomputer USING btree (name varchar_pattern_ops) + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq + ON public.db_dbcomputer USING btree (uuid) + db_dbgroup: + db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup + USING btree (label) + db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX 
db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup + USING btree (user_id) + db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq + ON public.db_dbgroup USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink + USING btree (input_id) + db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink + USING btree (label) + db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink + USING btree (label varchar_pattern_ops) + db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink + USING btree (output_id) + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink + USING btree (type) + db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog + USING btree (dbnode_id) + db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog + USING btree (levelname) + db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like + ON public.db_dblog USING btree (levelname varchar_pattern_ops) + db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog + USING btree (loggername) + db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like + ON public.db_dblog USING btree (loggername varchar_pattern_ops) + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON + public.db_dblog USING btree (uuid) + db_dbnode: + db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode + USING btree (ctime) + db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 + ON public.db_dbnode USING btree (dbcomputer_id) + db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode + USING btree (label) + db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode + USING btree (label varchar_pattern_ops) + db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode + USING btree (mtime) + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey 
ON public.db_dbnode USING btree + (id) + db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 + ON public.db_dbnode USING btree (process_type) + db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like + ON public.db_dbnode USING btree (process_type varchar_pattern_ops) + db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode + USING btree (node_type) + db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode + USING btree (user_id) + db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq + ON public.db_dbnode USING btree (uuid) + db_dbsetting: + db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON + public.db_dbsetting USING btree (key varchar_pattern_ops) + db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq + ON public.db_dbsetting USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + db_dbuser: + db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser + USING btree (email varchar_pattern_ops) + db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq + ON public.db_dbuser USING btree (email) + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0047_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0047_.yml new file mode 100644 index 0000000000..da230b8a6c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0047_.yml @@ -0,0 +1,523 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: 
character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + 
default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_49bac08c_uniq: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_f35defa6_uniq: + - uuid + db_dbgroup: + db_dbgroup_name_type_12656f33_uniq: + - label + - type_string + db_dbgroup_uuid_af896177_uniq: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_9cf77df3_uniq: + - uuid + db_dbnode: + db_dbnode_uuid_62e0bf98_uniq: + - uuid + db_dbsetting: + db_dbsetting_key_1b84beb4_uniq: + - key + db_dbuser: + db_dbuser_email_30150b7e_uniq: + - email +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id) + REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES + db_dbuser(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id) + REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) + REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id) + REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + 
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_name_f1800b1a_like: CREATE INDEX db_dbcomputer_name_f1800b1a_like
+      ON public.db_dbcomputer USING btree (name varchar_pattern_ops)
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0048_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0048_.yml
new file mode 100644
index 0000000000..9d4fea24f6
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0048_.yml
@@ -0,0 +1,523 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    attributes:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    repository_metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    val:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbuser:
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+constraints:
+  primary_key:
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_label_bc480bab_uniq:
+      - label
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+foreign_keys:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_label_bc480bab_like: CREATE INDEX db_dbcomputer_label_bc480bab_like
+      ON public.db_dbcomputer USING btree (label varchar_pattern_ops)
+    db_dbcomputer_label_bc480bab_uniq: CREATE UNIQUE INDEX db_dbcomputer_label_bc480bab_uniq
+      ON public.db_dbcomputer USING btree (label)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
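Each of these YAML files snapshots the complete PostgreSQL schema (columns, constraints, foreign keys, indexes) produced by one migration revision, so the test suite can migrate a database to a revision and assert that the live schema matches the stored snapshot exactly. The following is only a minimal sketch of how such a comparison could be driven with SQLAlchemy's inspector and PyYAML; the helper name describe_columns and the migrated_engine fixture are assumptions for illustration, not the actual aiida-core test code.

    import yaml
    from pathlib import Path
    from sqlalchemy import inspect

    def describe_columns(engine):
        """Reflect the live database into the shape of the 'columns' section."""
        inspector = inspect(engine)
        return {
            table: {
                column['name']: {
                    # Type rendering is simplified here; the real fixture
                    # stores the verbatim PostgreSQL type name.
                    'data_type': str(column['type']).lower(),
                    'default': column['default'],
                    'is_nullable': column['nullable'],
                }
                for column in inspector.get_columns(table)
            }
            for table in inspector.get_table_names()
        }

    def test_schema_snapshot(migrated_engine):  # hypothetical pytest fixture
        expected = yaml.safe_load(Path('test_django_django_0048_.yml').read_text())
        assert describe_columns(migrated_engine) == expected['columns']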
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0049_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0049_.yml
new file mode 100644
index 0000000000..9d4fea24f6
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0049_.yml
@@ -0,0 +1,523 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    attributes:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    repository_metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    val:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbuser:
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+constraints:
+  primary_key:
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      db_dbcomment_uuid_49bac08c_uniq:
+      - uuid
+    db_dbcomputer:
+      db_dbcomputer_label_bc480bab_uniq:
+      - label
+      db_dbcomputer_uuid_f35defa6_uniq:
+      - uuid
+    db_dbgroup:
+      db_dbgroup_name_type_12656f33_uniq:
+      - label
+      - type_string
+      db_dbgroup_uuid_af896177_uniq:
+      - uuid
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      db_dblog_uuid_9cf77df3_uniq:
+      - uuid
+    db_dbnode:
+      db_dbnode_uuid_62e0bf98_uniq:
+      - uuid
+    db_dbsetting:
+      db_dbsetting_key_1b84beb4_uniq:
+      - key
+    db_dbuser:
+      db_dbuser_email_30150b7e_uniq:
+      - email
+foreign_keys:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb_fk_db_dbuser_id: FOREIGN KEY (aiidauser_id)
+      REFERENCES db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_424f7ac4_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_8ed5e360_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_100f8a51_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES
+      db_dbuser(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d_fk_db_dbgroup_id: FOREIGN KEY (dbgroup_id)
+      REFERENCES db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_118b9439_fk_db_dbnode_id: FOREIGN KEY (dbnode_id)
+      REFERENCES db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_9245bd73_fk_db_dbnode_id: FOREIGN KEY (input_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+    db_dblink_output_id_c0167528_fk_db_dbnode_id: FOREIGN KEY (output_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    db_dblog_dbnode_id_da34b732_fk_db_dbnode_id: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_315372a3_fk_db_dbcomputer_id: FOREIGN KEY (dbcomputer_id)
+      REFERENCES db_dbcomputer(id) DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_12e7aeaf_fk_db_dbuser_id: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_dbcomputer_id_424f7ac4: CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcomment:
+    db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b
+      ON public.db_dbcomment USING btree (dbnode_id)
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment
+      USING btree (user_id)
+    db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq
+      ON public.db_dbcomment USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_label_bc480bab_like: CREATE INDEX db_dbcomputer_label_bc480bab_like
+      ON public.db_dbcomputer USING btree (label varchar_pattern_ops)
+    db_dbcomputer_label_bc480bab_uniq: CREATE UNIQUE INDEX db_dbcomputer_label_bc480bab_uniq
+      ON public.db_dbcomputer USING btree (label)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq
+      ON public.db_dbcomputer USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup
+      USING btree (label)
+    db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq
+      ON public.db_dbgroup USING btree (label, type_string)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup
+      USING btree (type_string)
+    db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup
+      USING btree (user_id)
+    db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq
+      ON public.db_dbgroup USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink
+      USING btree (input_id)
+    db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink
+      USING btree (label)
+    db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink
+      USING btree (output_id)
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON public.db_dblink
+      USING btree (type)
+    db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog
+      USING btree (dbnode_id)
+    db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog
+      USING btree (levelname)
+    db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like
+      ON public.db_dblog USING btree (levelname varchar_pattern_ops)
+    db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog
+      USING btree (loggername)
+    db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like
+      ON public.db_dblog USING btree (loggername varchar_pattern_ops)
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON
+      public.db_dblog USING btree (uuid)
+  db_dbnode:
+    db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode
+      USING btree (ctime)
+    db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode
+      USING btree (label)
+    db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode
+      USING btree (mtime)
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0
+      ON public.db_dbnode USING btree (process_type)
+    db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like
+      ON public.db_dbnode USING btree (process_type varchar_pattern_ops)
+    db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode
+      USING btree (node_type)
+    db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode
+      USING btree (user_id)
+    db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq
+      ON public.db_dbnode USING btree (uuid)
+  db_dbsetting:
+    db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON
+      public.db_dbsetting USING btree (key varchar_pattern_ops)
+    db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq
+      ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+  db_dbuser:
+    db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq
+      ON public.db_dbuser USING btree (email)
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
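The naming change visible between the 0049 and 0050 snapshots, Django's hash-suffixed constraint names (db_dbgroup_name_66c75272, db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq) giving way to uniform ix_/uq_/fk_ prefixes, is what SQLAlchemy calls a naming convention on the MetaData object. The sketch below shows a convention that would generate names of this shape; the exact convention used by aiida-core is not shown in this diff and may differ.

    from sqlalchemy import MetaData

    # Tokens such as %(column_0_N_name)s concatenate all constrained column
    # names, matching names like uq_db_dbgroup_label_type_string in the
    # 0050 snapshot. This dict is an illustrative assumption.
    NAMING_CONVENTION = {
        'pk': '%(table_name)s_pkey',
        'ix': 'ix_%(table_name)s_%(column_0_N_name)s',
        'uq': 'uq_%(table_name)s_%(column_0_N_name)s',
        'fk': 'fk_%(table_name)s_%(column_0_N_name)s_%(referred_table_name)s',
    }
    metadata = MetaData(naming_convention=NAMING_CONVENTION)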
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0050_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0050_.yml
new file mode 100644
index 0000000000..30b7e96182
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_django_django_0050_.yml
@@ -0,0 +1,523 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    attributes:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    repository_metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    val:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbuser:
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+constraints:
+  primary_key:
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      uq_db_dbauthinfo_aiidauser_id_dbcomputer_id:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      uq_db_dbcomment_uuid:
+      - uuid
+    db_dbcomputer:
+      uq_db_dbcomputer_label:
+      - label
+      uq_db_dbcomputer_uuid:
+      - uuid
+    db_dbgroup:
+      uq_db_dbgroup_label_type_string:
+      - label
+      - type_string
+      uq_db_dbgroup_uuid:
+      - uuid
+    db_dbgroup_dbnodes:
+      uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      uq_db_dblog_uuid:
+      - uuid
+    db_dbnode:
+      uq_db_dbnode_uuid:
+      - uuid
+    db_dbsetting:
+      uq_db_dbsetting_key:
+      - key
+    db_dbuser:
+      uq_db_dbuser_email:
+      - email
+foreign_keys:
+  db_dbauthinfo:
+    fk_db_dbauthinfo_aiidauser_id_db_dbuser: FOREIGN KEY (aiidauser_id) REFERENCES
+      db_dbuser(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbauthinfo_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES
+      db_dbcomputer(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    fk_db_dbcomment_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbcomment_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON
+      DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    fk_db_dbgroup_dbnodes_dbgroup_id_db_dbgroup: FOREIGN KEY (dbgroup_id) REFERENCES
+      db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbgroup_dbnodes_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    fk_db_dblink_input_id_db_dbnode: FOREIGN KEY (input_id) REFERENCES db_dbnode(id)
+      DEFERRABLE INITIALLY DEFERRED
+    fk_db_dblink_output_id_db_dbnode: FOREIGN KEY (output_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    fk_db_dblog_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    fk_db_dbnode_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES
+      db_dbcomputer(id) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbnode_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+    ix_db_dbauthinfo_aiidauser_id: CREATE INDEX ix_db_dbauthinfo_aiidauser_id ON public.db_dbauthinfo
+      USING btree (aiidauser_id)
+    ix_db_dbauthinfo_dbcomputer_id: CREATE INDEX ix_db_dbauthinfo_dbcomputer_id ON
+      public.db_dbauthinfo USING btree (dbcomputer_id)
+    uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: CREATE UNIQUE INDEX uq_db_dbauthinfo_aiidauser_id_dbcomputer_id
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+  db_dbcomment:
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    ix_db_dbcomment_dbnode_id: CREATE INDEX ix_db_dbcomment_dbnode_id ON public.db_dbcomment
+      USING btree (dbnode_id)
+    ix_db_dbcomment_user_id: CREATE INDEX ix_db_dbcomment_user_id ON public.db_dbcomment
+      USING btree (user_id)
+    uq_db_dbcomment_uuid: CREATE UNIQUE INDEX uq_db_dbcomment_uuid ON public.db_dbcomment
+      USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    ix_pat_db_dbcomputer_label: CREATE INDEX ix_pat_db_dbcomputer_label ON public.db_dbcomputer
+      USING btree (label varchar_pattern_ops)
+    uq_db_dbcomputer_label: CREATE UNIQUE INDEX uq_db_dbcomputer_label ON public.db_dbcomputer
+      USING btree (label)
+    uq_db_dbcomputer_uuid: CREATE UNIQUE INDEX uq_db_dbcomputer_uuid ON public.db_dbcomputer
+      USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING
+      btree (label)
+    ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup
+      USING btree (type_string)
+    ix_db_dbgroup_user_id: CREATE INDEX ix_db_dbgroup_user_id ON public.db_dbgroup
+      USING btree (user_id)
+    ix_pat_db_dbgroup_label: CREATE INDEX ix_pat_db_dbgroup_label ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dbgroup_type_string: CREATE INDEX ix_pat_db_dbgroup_type_string ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    uq_db_dbgroup_label_type_string: CREATE UNIQUE INDEX uq_db_dbgroup_label_type_string
+      ON public.db_dbgroup USING btree (label, type_string)
+    uq_db_dbgroup_uuid: CREATE UNIQUE INDEX uq_db_dbgroup_uuid ON public.db_dbgroup
+      USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+    ix_db_dbgroup_dbnodes_dbgroup_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbgroup_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    ix_db_dbgroup_dbnodes_dbnode_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: CREATE UNIQUE INDEX uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+  db_dblink:
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink
+      USING btree (input_id)
+    ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING
+      btree (label)
+    ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink
+      USING btree (output_id)
+    ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree
+      (type)
+    ix_pat_db_dblink_label: CREATE INDEX ix_pat_db_dblink_label ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dblink_type: CREATE INDEX ix_pat_db_dblink_type ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    ix_db_dblog_dbnode_id: CREATE INDEX ix_db_dblog_dbnode_id ON public.db_dblog USING
+      btree (dbnode_id)
+    ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING
+      btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername)
+    ix_pat_db_dblog_levelname: CREATE INDEX ix_pat_db_dblog_levelname ON public.db_dblog
+      USING btree (levelname varchar_pattern_ops)
+    ix_pat_db_dblog_loggername: CREATE INDEX ix_pat_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername varchar_pattern_ops)
+    uq_db_dblog_uuid: CREATE UNIQUE INDEX uq_db_dblog_uuid ON public.db_dblog USING
+      btree (uuid)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    ix_db_dbnode_ctime: CREATE INDEX ix_db_dbnode_ctime ON public.db_dbnode USING
+      btree (ctime)
+    ix_db_dbnode_dbcomputer_id: CREATE INDEX ix_db_dbnode_dbcomputer_id ON public.db_dbnode
+      USING btree (dbcomputer_id)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_mtime: CREATE INDEX ix_db_dbnode_mtime ON public.db_dbnode USING
+      btree (mtime)
+    ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode
+      USING btree (node_type)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_user_id: CREATE INDEX ix_db_dbnode_user_id ON public.db_dbnode USING
+      btree (user_id)
+    ix_pat_db_dbnode_label: CREATE INDEX ix_pat_db_dbnode_label ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dbnode_node_type: CREATE INDEX ix_pat_db_dbnode_node_type ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    ix_pat_db_dbnode_process_type: CREATE INDEX ix_pat_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type varchar_pattern_ops)
+    uq_db_dbnode_uuid: CREATE UNIQUE INDEX uq_db_dbnode_uuid ON public.db_dbnode USING
+      btree (uuid)
+  db_dbsetting:
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_pat_db_dbsetting_key: CREATE INDEX ix_pat_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key varchar_pattern_ops)
+    uq_db_dbsetting_key: CREATE UNIQUE INDEX uq_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_pat_db_dbuser_email: CREATE INDEX ix_pat_db_dbuser_email ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    uq_db_dbuser_email: CREATE UNIQUE INDEX uq_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
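Beyond the renames, the 0050 snapshot also attaches explicit ON DELETE behaviour to every foreign key (CASCADE for ownership-style links, RESTRICT for db_dbnode's references to users and computers), while keeping the constraints DEFERRABLE INITIALLY DEFERRED, so they are only checked at transaction commit. Declared with SQLAlchemy, one of these keys could look like the sketch below; the table definitions are trimmed to the relevant columns and are not the actual aiida-core models.

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()

    db_dbnode = Table('db_dbnode', metadata, Column('id', Integer, primary_key=True))

    # Deleting a node cascades to its comments; because the constraint is
    # deferred, the check runs at commit rather than per statement.
    db_dbcomment = Table(
        'db_dbcomment', metadata,
        Column('id', Integer, primary_key=True),
        Column('dbnode_id', Integer,
               ForeignKey('db_dbnode.id', ondelete='CASCADE',
                          deferrable=True, initially='DEFERRED')),
    )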
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_main_main_0001_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_main_main_0001_.yml
new file mode 100644
index 0000000000..b7dcb1cd45
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_main_main_0001_.yml
@@ -0,0 +1,523 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: false
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable: false
+    input_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    output_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+  db_dblog:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dblog_id_seq'::regclass)
+      is_nullable: false
+    levelname:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 50
+    loggername:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    message:
+      data_type: text
+      default: null
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbnode:
+    attributes:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    extras:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbnode_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    node_type:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    process_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    repository_metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: false
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: false
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: false
+  db_dbsetting:
+    description:
+      data_type: text
+      default: null
+      is_nullable: false
+    id:
+      data_type: integer
+      default: nextval('db_dbsetting_id_seq'::regclass)
+      is_nullable: false
+    key:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 1024
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: false
+    val:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbuser:
+    email:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    first_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    id:
+      data_type: integer
+      default: nextval('db_dbuser_id_seq'::regclass)
+      is_nullable: false
+    institution:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+    last_name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 254
+constraints:
+  primary_key:
+    db_dbauthinfo:
+      db_dbauthinfo_pkey:
+      - id
+    db_dbcomment:
+      db_dbcomment_pkey:
+      - id
+    db_dbcomputer:
+      db_dbcomputer_pkey:
+      - id
+    db_dbgroup:
+      db_dbgroup_pkey:
+      - id
+    db_dbgroup_dbnodes:
+      db_dbgroup_dbnodes_pkey:
+      - id
+    db_dblink:
+      db_dblink_pkey:
+      - id
+    db_dblog:
+      db_dblog_pkey:
+      - id
+    db_dbnode:
+      db_dbnode_pkey:
+      - id
+    db_dbsetting:
+      db_dbsetting_pkey:
+      - id
+    db_dbuser:
+      db_dbuser_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      uq_db_dbauthinfo_aiidauser_id_dbcomputer_id:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcomment:
+      uq_db_dbcomment_uuid:
+      - uuid
+    db_dbcomputer:
+      uq_db_dbcomputer_label:
+      - label
+      uq_db_dbcomputer_uuid:
+      - uuid
+    db_dbgroup:
+      uq_db_dbgroup_label_type_string:
+      - label
+      - type_string
+      uq_db_dbgroup_uuid:
+      - uuid
+    db_dbgroup_dbnodes:
+      uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id:
+      - dbgroup_id
+      - dbnode_id
+    db_dblog:
+      uq_db_dblog_uuid:
+      - uuid
+    db_dbnode:
+      uq_db_dbnode_uuid:
+      - uuid
+    db_dbsetting:
+      uq_db_dbsetting_key:
+      - key
+    db_dbuser:
+      uq_db_dbuser_email:
+      - email
+foreign_keys:
+  db_dbauthinfo:
+    fk_db_dbauthinfo_aiidauser_id_db_dbuser: FOREIGN KEY (aiidauser_id) REFERENCES
+      db_dbuser(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbauthinfo_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES
+      db_dbcomputer(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    fk_db_dbcomment_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbcomment_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    fk_db_dbgroup_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    fk_db_dbgroup_dbnodes_dbgroup_id_db_dbgroup: FOREIGN KEY (dbgroup_id) REFERENCES
+      db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbgroup_dbnodes_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id) DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    fk_db_dblink_input_id_db_dbnode: FOREIGN KEY (input_id) REFERENCES db_dbnode(id)
+      DEFERRABLE INITIALLY DEFERRED
+    fk_db_dblink_output_id_db_dbnode: FOREIGN KEY (output_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dblog:
+    fk_db_dblog_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    fk_db_dbnode_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES
+      db_dbcomputer(id) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED
+    fk_db_dbnode_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+      ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+    ix_db_dbauthinfo_db_dbauthinfo_aiidauser_id: CREATE INDEX ix_db_dbauthinfo_db_dbauthinfo_aiidauser_id
+      ON public.db_dbauthinfo USING btree (aiidauser_id)
+    ix_db_dbauthinfo_db_dbauthinfo_dbcomputer_id: CREATE INDEX ix_db_dbauthinfo_db_dbauthinfo_dbcomputer_id
+      ON public.db_dbauthinfo USING btree (dbcomputer_id)
+    uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: CREATE UNIQUE INDEX uq_db_dbauthinfo_aiidauser_id_dbcomputer_id
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+  db_dbcomment:
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+    ix_db_dbcomment_db_dbcomment_dbnode_id: CREATE INDEX ix_db_dbcomment_db_dbcomment_dbnode_id
+      ON public.db_dbcomment USING btree (dbnode_id)
+    ix_db_dbcomment_db_dbcomment_user_id: CREATE INDEX ix_db_dbcomment_db_dbcomment_user_id
+      ON public.db_dbcomment USING btree (user_id)
+    uq_db_dbcomment_uuid: CREATE UNIQUE INDEX uq_db_dbcomment_uuid ON public.db_dbcomment
+      USING btree (uuid)
+  db_dbcomputer:
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+    ix_pat_db_dbcomputer_label: CREATE INDEX ix_pat_db_dbcomputer_label ON public.db_dbcomputer
+      USING btree (label varchar_pattern_ops)
+    uq_db_dbcomputer_label: CREATE UNIQUE INDEX uq_db_dbcomputer_label ON public.db_dbcomputer
+      USING btree (label)
+    uq_db_dbcomputer_uuid: CREATE UNIQUE INDEX uq_db_dbcomputer_uuid ON public.db_dbcomputer
+      USING btree (uuid)
+  db_dbgroup:
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    ix_db_dbgroup_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_db_dbgroup_label ON
+      public.db_dbgroup USING btree (label)
+    ix_db_dbgroup_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_db_dbgroup_type_string
+      ON public.db_dbgroup USING btree (type_string)
+    ix_db_dbgroup_db_dbgroup_user_id: CREATE INDEX ix_db_dbgroup_db_dbgroup_user_id
+      ON public.db_dbgroup USING btree (user_id)
+    ix_pat_db_dbgroup_label: CREATE INDEX ix_pat_db_dbgroup_label ON public.db_dbgroup
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dbgroup_type_string: CREATE INDEX ix_pat_db_dbgroup_type_string ON public.db_dbgroup
+      USING btree (type_string varchar_pattern_ops)
+    uq_db_dbgroup_label_type_string: CREATE UNIQUE INDEX uq_db_dbgroup_label_type_string
+      ON public.db_dbgroup USING btree (label, type_string)
+    uq_db_dbgroup_uuid: CREATE UNIQUE INDEX uq_db_dbgroup_uuid ON public.db_dbgroup
+      USING btree (uuid)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+    ix_db_dbgroup_dbnodes_db_dbgroup_dbnodes_dbgroup_id: CREATE INDEX ix_db_dbgroup_dbnodes_db_dbgroup_dbnodes_dbgroup_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    ix_db_dbgroup_dbnodes_db_dbgroup_dbnodes_dbnode_id: CREATE INDEX ix_db_dbgroup_dbnodes_db_dbgroup_dbnodes_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: CREATE UNIQUE INDEX uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id
+      ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+  db_dblink:
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    ix_db_dblink_db_dblink_input_id: CREATE INDEX ix_db_dblink_db_dblink_input_id
+      ON public.db_dblink USING btree (input_id)
+    ix_db_dblink_db_dblink_label: CREATE INDEX ix_db_dblink_db_dblink_label ON public.db_dblink
+      USING btree (label)
+    ix_db_dblink_db_dblink_output_id: CREATE INDEX ix_db_dblink_db_dblink_output_id
+      ON public.db_dblink USING btree (output_id)
+    ix_db_dblink_db_dblink_type: CREATE INDEX ix_db_dblink_db_dblink_type ON public.db_dblink
+      USING btree (type)
+    ix_pat_db_dblink_label: CREATE INDEX ix_pat_db_dblink_label ON public.db_dblink
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dblink_type: CREATE INDEX ix_pat_db_dblink_type ON public.db_dblink
+      USING btree (type varchar_pattern_ops)
+  db_dblog:
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    ix_db_dblog_db_dblog_dbnode_id: CREATE INDEX ix_db_dblog_db_dblog_dbnode_id ON
+      public.db_dblog USING btree (dbnode_id)
+    ix_db_dblog_db_dblog_levelname: CREATE INDEX ix_db_dblog_db_dblog_levelname ON
+      public.db_dblog USING btree (levelname)
+    ix_db_dblog_db_dblog_loggername: CREATE INDEX ix_db_dblog_db_dblog_loggername
+      ON public.db_dblog USING btree (loggername)
+    ix_pat_db_dblog_levelname: CREATE INDEX ix_pat_db_dblog_levelname ON public.db_dblog
+      USING btree (levelname varchar_pattern_ops)
+    ix_pat_db_dblog_loggername: CREATE INDEX ix_pat_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername varchar_pattern_ops)
+    uq_db_dblog_uuid: CREATE UNIQUE INDEX uq_db_dblog_uuid ON public.db_dblog USING
+      btree (uuid)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    ix_db_dbnode_db_dbnode_ctime: CREATE INDEX ix_db_dbnode_db_dbnode_ctime ON public.db_dbnode
+      USING btree (ctime)
+    ix_db_dbnode_db_dbnode_dbcomputer_id: CREATE INDEX ix_db_dbnode_db_dbnode_dbcomputer_id
+      ON public.db_dbnode USING btree (dbcomputer_id)
+    ix_db_dbnode_db_dbnode_label: CREATE INDEX ix_db_dbnode_db_dbnode_label ON public.db_dbnode
+      USING btree (label)
+    ix_db_dbnode_db_dbnode_mtime: CREATE INDEX ix_db_dbnode_db_dbnode_mtime ON public.db_dbnode
+      USING btree (mtime)
+    ix_db_dbnode_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_db_dbnode_node_type
+      ON public.db_dbnode USING btree (node_type)
+    ix_db_dbnode_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_db_dbnode_process_type
+      ON public.db_dbnode USING btree (process_type)
+    ix_db_dbnode_db_dbnode_user_id: CREATE INDEX ix_db_dbnode_db_dbnode_user_id ON
+      public.db_dbnode USING btree (user_id)
+    ix_pat_db_dbnode_label: CREATE INDEX ix_pat_db_dbnode_label ON public.db_dbnode
+      USING btree (label varchar_pattern_ops)
+    ix_pat_db_dbnode_node_type: CREATE INDEX ix_pat_db_dbnode_node_type ON public.db_dbnode
+      USING btree (node_type varchar_pattern_ops)
+    ix_pat_db_dbnode_process_type: CREATE INDEX ix_pat_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type varchar_pattern_ops)
+    uq_db_dbnode_uuid: CREATE UNIQUE INDEX uq_db_dbnode_uuid ON public.db_dbnode USING
+      btree (uuid)
+  db_dbsetting:
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_pat_db_dbsetting_key: CREATE INDEX ix_pat_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key varchar_pattern_ops)
+    uq_db_dbsetting_key: CREATE UNIQUE INDEX uq_db_dbsetting_key ON public.db_dbsetting
+      USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_pat_db_dbuser_email: CREATE INDEX ix_pat_db_dbuser_email ON public.db_dbuser
+      USING btree (email varchar_pattern_ops)
+    uq_db_dbuser_email: CREATE UNIQUE INDEX uq_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_0_e15ef2630a1b_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_0_e15ef2630a1b_.yml
new file mode 100644
index 0000000000..b0bb3de537
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_0_e15ef2630a1b_.yml
@@ -0,0 +1,801 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcalcstate:
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcalcstate_id_seq'::regclass)
+      is_nullable: false
+    state:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    type:
+      data_type: character varying
+      default: null
+ is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbpath: + child_id: + data_type: integer + default: null + is_nullable: true + depth: + data_type: integer + default: null + is_nullable: true + direct_edge_id: + data_type: integer + default: null + is_nullable: true + entry_edge_id: + data_type: integer + default: null + is_nullable: true + exit_edge_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbpath_id_seq'::regclass) + is_nullable: false + parent_id: + data_type: integer + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false 
+ max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 
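Each snapshot fixture in this directory records the same four top-level sections (columns, constraints, foreign_keys, indexes), keyed by table name, which is what makes the files directly comparable across revisions. A schema dump of this shape can be produced with SQLAlchemy's runtime inspection API; the following is a minimal sketch only, where the dump_schema helper, the two sections it records, and the connection URL are illustrative assumptions, not the generator actually used by the test suite:

    from sqlalchemy import create_engine, inspect
    import yaml

    def dump_schema(engine):
        """Introspect a PostgreSQL schema into the fixtures' nested-dict shape (sketch)."""
        inspector = inspect(engine)
        snapshot = {'columns': {}, 'indexes': {}}
        for table in inspector.get_table_names(schema='public'):
            snapshot['columns'][table] = {
                col['name']: {'data_type': str(col['type']), 'is_nullable': col['nullable']}
                for col in inspector.get_columns(table, schema='public')
            }
            # get_indexes() returns a list of dicts (name, column_names, unique, ...)
            snapshot['indexes'][table] = inspector.get_indexes(table, schema='public')
        return snapshot

    # Assumed connection URL, mirroring the trust-auth test database used elsewhere in this diff.
    engine = create_engine('postgresql://postgres@localhost:5432/test_aiida')
    print(yaml.safe_dump(dump_schema(engine), default_flow_style=False))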
+ parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbpath: + db_dbpath_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY 
(dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbpath: + db_dbpath_child_id_fkey: FOREIGN KEY (child_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dbpath_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes 
+ USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbpath: + db_dbpath_pkey: CREATE UNIQUE INDEX db_dbpath_pkey ON public.db_dbpath USING btree + (id) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_10_59edaf8a8b79_.yml 
b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_10_59edaf8a8b79_.yml new file mode 100644 index 0000000000..c0c9a46bf7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_10_59edaf8a8b79_.yml @@ -0,0 +1,757 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: 
+ data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + 
default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + 
db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + 
db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON 
public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_11_62fe0d36de90_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_11_62fe0d36de90_.yml new file mode 100644 index 0000000000..d1499943d0 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_11_62fe0d36de90_.yml @@ -0,0 +1,762 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + 
data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + 
attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: 
null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + 
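Comparing consecutive snapshots makes each migration's effect explicit: the 62fe0d36de90 fixture records a db_dbnode_uuid_key unique constraint on db_dbnode.uuid that is absent from the 59edaf8a8b79 fixture, consistent with a migration that starts enforcing node UUID uniqueness. Because the fixtures are plain YAML, such a comparison takes only a few lines; the helper below is an illustrative sketch, not part of the test suite:

    import yaml
    from pathlib import Path

    def added_unique_constraints(old_yml, new_yml):
        """Unique constraints present in new_yml but missing from old_yml (sketch)."""
        old = yaml.safe_load(Path(old_yml).read_text())['constraints']['unique']
        new = yaml.safe_load(Path(new_yml).read_text())['constraints']['unique']
        return {
            table: {name: cols for name, cols in constraints.items()
                    if name not in old.get(table, {})}
            for table, constraints in new.items()
            if set(constraints) - set(old.get(table, {}))
        }

    # Expected here: {'db_dbnode': {'db_dbnode_uuid_key': ['uuid']}}
    print(added_unique_constraints('test_sqla_10_59edaf8a8b79_.yml',
                                   'test_sqla_11_62fe0d36de90_.yml'))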
db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON 
public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + 
db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_12_5d4d844852b6_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_12_5d4d844852b6_.yml new file mode 100644 index 0000000000..d1499943d0 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_12_5d4d844852b6_.yml @@ -0,0 +1,762 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: 
boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: 
boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + 
default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: 
FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + 
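(A related aside: the `indexes` mappings in these snapshots store the verbatim `CREATE INDEX` statement that PostgreSQL reports for each index, which is what makes the files easy to regenerate. Below is a hedged sketch of collecting definitions in the same `{table: {name: indexdef}}` shape from the standard `pg_indexes` view; the connection URL is a placeholder.)

```python
# Sketch: collect index definitions in the same {table: {index_name: indexdef}}
# shape as the `indexes` mappings in these snapshots. The URL is a placeholder.
from sqlalchemy import create_engine, text


def index_definitions(url, schema='public'):
    """Return {table: {index_name: indexdef}} for every index in `schema`."""
    query = text(
        'SELECT tablename, indexname, indexdef '
        'FROM pg_indexes WHERE schemaname = :schema'
    )
    result = {}
    with create_engine(url).connect() as connection:
        for tablename, indexname, indexdef in connection.execute(query, {'schema': schema}):
            result.setdefault(tablename, {})[indexname] = indexdef
    return result
```

Regenerating a snapshot after adding a migration is then just a matter of dumping mappings like this back to YAML.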
ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING 
btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_13_a603da2cc809_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_13_a603da2cc809_.yml new file mode 100644 index 0000000000..d1499943d0 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_13_a603da2cc809_.yml @@ -0,0 +1,762 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + 
default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character 
varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + 
is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE 
DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON 
public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_14_162b99bca4a2_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_14_162b99bca4a2_.yml new file mode 100644 index 0000000000..69d7fe33b7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_14_162b99bca4a2_.yml @@ -0,0 +1,727 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: +
data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: 
true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: 
+ - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo 
USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + 
db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_15_140c971ae0a3_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_15_140c971ae0a3_.yml new file mode 100644 index 0000000000..69d7fe33b7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_15_140c971ae0a3_.yml @@ -0,0 +1,727 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + 
is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: 
false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + 
max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + 
db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE 
INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_16_239cea6d2452_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_16_239cea6d2452_.yml new file mode 
100644 index 0000000000..69d7fe33b7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_16_239cea6d2452_.yml @@ -0,0 +1,727 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + 
data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + 
default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + 
db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: 
CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: 
CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_17_b8b23ddefad4_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_17_b8b23ddefad4_.yml new file mode 100644 index 0000000000..4b8d38d8b7 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_17_b8b23ddefad4_.yml @@ -0,0 +1,727 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + 
transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 
255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time 
zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE 
CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + 
USING btree (loggername)
+    ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING
+      btree (objname)
+    ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree
+      (objpk)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode
+      USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_18_e72ad251bcdb_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_18_e72ad251bcdb_.yml
new file mode 100644
index 0000000000..4b8d38d8b7
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_18_e72ad251bcdb_.yml
@@ -0,0
+1,727 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: 
true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + 
data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - 
dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE 
INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + 
ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_19_7ca08c391c49_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_19_7ca08c391c49_.yml
new file mode 100644
index 0000000000..4b8d38d8b7
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_19_7ca08c391c49_.yml
@@ -0,0 +1,727 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+
max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true 
+ val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: 
null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN 
KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON 
public.db_dblog USING
+      btree (objname)
+    ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree
+      (objpk)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode
+      USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_1_70c7d732f1b2_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_1_70c7d732f1b2_.yml
new file mode 100644
index 0000000000..87ff4bf75b
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_1_70c7d732f1b2_.yml
@@ -0,0 +1,761 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+
is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + 
key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true 
+ id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + 
    db_dbworkflowdata:
+      db_dbworkflowdata_pkey:
+      - id
+    db_dbworkflowstep:
+      db_dbworkflowstep_pkey:
+      - id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_pkey:
+      - id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_pkey:
+      - id
+  unique:
+    db_dbauthinfo:
+      db_dbauthinfo_aiidauser_id_dbcomputer_id_key:
+      - aiidauser_id
+      - dbcomputer_id
+    db_dbcalcstate:
+      db_dbcalcstate_dbnode_id_state_key:
+      - dbnode_id
+      - state
+    db_dbcomputer:
+      db_dbcomputer_name_key:
+      - name
+    db_dbgroup:
+      db_dbgroup_name_type_key:
+      - name
+      - type
+    db_dbsetting:
+      db_dbsetting_key_key:
+      - key
+    db_dbworkflowdata:
+      db_dbworkflowdata_parent_id_name_data_type_key:
+      - data_type
+      - name
+      - parent_id
+    db_dbworkflowstep:
+      db_dbworkflowstep_parent_id_name_key:
+      - name
+      - parent_id
+    db_dbworkflowstep_calculations:
+      db_dbworkflowstep_calculations_id_dbnode_id_key:
+      - dbnode_id
+      - dbworkflowstep_id
+    db_dbworkflowstep_sub_workflows:
+      db_dbworkflowstep_sub_workflows_id_dbworkflow__key:
+      - dbworkflow_id
+      - dbworkflowstep_id
+foreign_keys:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbcomment:
+    db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+    db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE
+      CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup:
+    db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE
+      CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id)
+      DEFERRABLE INITIALLY DEFERRED
+    db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id)
+      DEFERRABLE INITIALLY DEFERRED
+  db_dblink:
+    db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE
+      INITIALLY DEFERRED
+    db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON
+      DELETE CASCADE DEFERRABLE INITIALLY DEFERRED
+  db_dbnode:
+    db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id)
+      ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED
+    db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE
+      RESTRICT DEFERRABLE INITIALLY DEFERRED
+  db_dbworkflow:
+    db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+  db_dbworkflowdata:
+    db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id)
+    db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id)
+    db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES
+      db_dbnode(id)
+    db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id)
+      REFERENCES db_dbworkflow(id)
+    db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id)
+      REFERENCES db_dbworkflowstep(id)
+indexes:
+  db_dbauthinfo:
+    db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key
+      ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id)
+    db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo
+      USING btree (id)
+  db_dbcalcstate:
+    db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key
+      ON public.db_dbcalcstate USING btree (dbnode_id, state)
+    db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate
+      USING btree (id)
+    ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate
+      USING btree (state)
+  db_dbcomment:
+    db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment
+      USING btree (id)
+  db_dbcomputer:
+    db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer
+      USING btree (name)
+    db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer
+      USING btree (id)
+  db_dbgroup:
+    db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup
+      USING btree (name, type)
+    db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING
+      btree (id)
+    ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING
+      btree (name)
+    ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING
+      btree (type)
+  db_dbgroup_dbnodes:
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes
+      USING btree (id)
+  db_dblink:
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree
+      (id)
+    ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING
+      btree (label)
+  db_dblock:
+    db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree
+      (key)
+  db_dblog:
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree
+      (id)
+    ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING
+      btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername)
+    ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING
+      btree (objname)
+    ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree
+      (objpk)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_20_041a79fc615f_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_20_041a79fc615f_.yml
new file mode 100644
index 0000000000..e21ed1e338
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_20_041a79fc615f_.yml
@@ -0,0 +1,721 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + 
data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: 
null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE 
INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON 
public.db_dblog USING
+      btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode
+      USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_21_ea2f50e7f615_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_21_ea2f50e7f615_.yml
new file mode 100644
index 0000000000..0c9b569cca
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_21_ea2f50e7f615_.yml
@@ -0,0 +1,725 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type:
integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + 
is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: 
null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + 
- key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + 
db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) 
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_22_375c2db70663_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_22_375c2db70663_.yml
new file mode 100644
index 0000000000..fdb88181eb
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_22_375c2db70663_.yml
@@ -0,0 +1,730 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + 
email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: 
integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + 
db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX 
ix_db_dblog_loggername ON public.db_dblog
+      USING btree (loggername)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree
+      (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode
+      USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_23_6a5c2ea1439d_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_23_6a5c2ea1439d_.yml
new file mode 100644
index 0000000000..fdb88181eb
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_23_6a5c2ea1439d_.yml
@@ -0,0 +1,730 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + 
time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + 
is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + 
db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: 
CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id 
+ ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_24_37f3d4882837_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_24_37f3d4882837_.yml new file mode 100644 index 0000000000..0b858160ba --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_24_37f3d4882837_.yml @@ -0,0 +1,748 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + 
db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: 
timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: 
null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES 
db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree 
(id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + 
db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_25_12536798d4d3_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_25_12536798d4d3_.yml new file mode 100644 index 0000000000..0b858160ba --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_25_12536798d4d3_.yml @@ -0,0 +1,748 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: 
+ data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + 
default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + 
db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + 
db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON 
public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_26_ce56d84bcc35_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_26_ce56d84bcc35_.yml new file mode 100644 index 0000000000..0b858160ba --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_26_ce56d84bcc35_.yml @@ -0,0 +1,748 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + 
data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + 
is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + 
data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - 
dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON 
public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + 
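These `test_all_schema` fixtures snapshot the complete PostgreSQL schema at a single migration revision, with one block each for `columns`, `constraints`, `foreign_keys`, and `indexes`; the `indexes` entries store the verbatim `CREATE INDEX` definitions. Below is a minimal sketch of how such a snapshot could be compared against a live database with SQLAlchemy reflection; the helper name, DSN, and fixture path are illustrative assumptions, not the repository's actual test code.

```python
# Illustrative sketch only: report index names recorded in a snapshot like
# the one above that no longer exist in a live database.
import yaml
from sqlalchemy import create_engine, inspect


def report_snapshot_indexes(dsn: str, snapshot_path: str) -> None:
    inspector = inspect(create_engine(dsn))
    with open(snapshot_path, encoding='utf8') as handle:
        snapshot = yaml.safe_load(handle)
    for table, expected in snapshot['indexes'].items():
        # Caveat: get_indexes() omits the index backing the primary key (and,
        # depending on the SQLAlchemy version, those backing UNIQUE
        # constraints), so a full comparison would also merge in
        # get_pk_constraint() and get_unique_constraints().
        reflected = {index['name'] for index in inspector.get_indexes(table)}
        missing = sorted(set(expected) - reflected)
        print(table, 'missing from reflection:', missing or 'nothing')


# report_snapshot_indexes('postgresql://localhost/test_aiida',
#                         'test_sqla_26_ce56d84bcc35_.yml')
```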
db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_27_61fc0913fae9_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_27_61fc0913fae9_.yml
new file mode 100644
index 0000000000..0b858160ba
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_27_61fc0913fae9_.yml
@@ -0,0 +1,748 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+ is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + 
time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + 
default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + 
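Almost every foreign key in these snapshots is declared `DEFERRABLE INITIALLY DEFERRED`: PostgreSQL then postpones the constraint check until the transaction commits, so rows that reference one another can be inserted in any order inside a single transaction. A sketch of declaring an equivalent constraint with SQLAlchemy follows; the tables are reduced to the relevant columns for illustration and are not the aiida-core model definitions.

```python
# Sketch: a deferrable, cascading foreign key like db_dbgroup_user_id_fkey.
from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

metadata = MetaData()

db_dbuser = Table('db_dbuser', metadata, Column('id', Integer, primary_key=True))

db_dbgroup = Table(
    'db_dbgroup',
    metadata,
    Column('id', Integer, primary_key=True),
    Column(
        'user_id',
        Integer,
        # Renders as ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED on
        # PostgreSQL: the check runs at COMMIT instead of per statement.
        ForeignKey('db_dbuser.id', ondelete='CASCADE',
                   deferrable=True, initially='DEFERRED'),
    ),
)
```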
db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX 
db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + 
db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_28_d254fdfed416_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_28_d254fdfed416_.yml
new file mode 100644
index 0000000000..0b858160ba
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_28_d254fdfed416_.yml
@@ -0,0 +1,748 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default:
nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + 
is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + 
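The snapshot for revision 28 is byte-identical to those for revisions 26 and 27 (all three new files carry the blob hash `0b858160ba`), i.e. those migrations touch data rather than the schema. Within each snapshot, the `constraints` block splits into `primary_key` and `unique` maps from table to constraint name to column list; the unique column lists are stored sorted (`data_type, name, parent_id`, for instance, even though the backing index is declared on `(parent_id, name, data_type)`). A hedged sketch of rebuilding the same two maps for one table via SQLAlchemy's inspector; the function name and DSN are assumptions:

```python
# Assumed helper (not necessarily the repository's API): reflect one table's
# primary-key and unique constraints into the snapshot's layout.
from sqlalchemy import create_engine, inspect


def constraint_summary(dsn: str, table: str) -> dict:
    inspector = inspect(create_engine(dsn))
    return {
        'primary_key': inspector.get_pk_constraint(table)['constrained_columns'],
        'unique': {
            constraint['name']: sorted(constraint['column_names'])
            for constraint in inspector.get_unique_constraints(table)
        },
    }


# constraint_summary('postgresql://localhost/test_aiida', 'db_dbnode')
# would be expected to yield:
# {'primary_key': ['id'], 'unique': {'db_dbnode_uuid_key': ['uuid']}}
```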
db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + 
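Three delete rules appear among these foreign keys: `ON DELETE CASCADE` (dependent rows such as comments are removed with their user), `ON DELETE RESTRICT` (a user or computer still referenced by nodes cannot be deleted), and, on the legacy workflow tables just above, no explicit clause, which PostgreSQL treats as `NO ACTION`. The statements below are illustrative restatements of that difference, not DDL copied from any migration:

```python
# Illustrative only: the three delete rules seen in this snapshot.
from sqlalchemy import text

# CASCADE: deleting a db_dbuser row also deletes the comments referencing it.
cascade = text(
    'ALTER TABLE db_dbcomment ADD CONSTRAINT db_dbcomment_user_id_fkey '
    'FOREIGN KEY (user_id) REFERENCES db_dbuser (id) '
    'ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED'
)

# RESTRICT: deleting a user that still owns nodes is rejected.
restrict = text(
    'ALTER TABLE db_dbnode ADD CONSTRAINT db_dbnode_user_id_fkey '
    'FOREIGN KEY (user_id) REFERENCES db_dbuser (id) '
    'ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED'
)

# No explicit clause, as on the legacy workflow tables: PostgreSQL applies
# NO ACTION, which also rejects the delete while referencing rows exist.
no_action = text(
    'ALTER TABLE db_dbworkflowstep ADD CONSTRAINT db_dbworkflowstep_parent_id_fkey '
    'FOREIGN KEY (parent_id) REFERENCES db_dbworkflow (id)'
)
```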
db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON 
public.db_dbnode USING
+      btree (label)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode
+      USING btree (process_type)
+    ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree
+      (type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting
+      USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting
+      USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING
+      btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree
+      (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser
+      USING btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow
+      USING btree (id)
+    db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow
+      USING btree (uuid)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow
+      USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key
+      ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata
+      USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id
+      ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON
+      public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key
+      ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep
+      USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key
+      ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey
+      ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key
+      ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey
+      ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_29_5ddd24e52864_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_29_5ddd24e52864_.yml
new file mode 100644
index 0000000000..754d45509d
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_29_5ddd24e52864_.yml
@@ -0,0 +1,748 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default:
nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: 
true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + 
data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - 
data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + 
USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree 
(label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_2_a6048f0ffca8_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_2_a6048f0ffca8_.yml new file mode 100644 index 0000000000..87ff4bf75b --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_2_a6048f0ffca8_.yml @@ -0,0 +1,761 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + 
enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: 
false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + 
data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + 
db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE 
UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + 
db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_30_5a49629f0d45_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_30_5a49629f0d45_.yml new file mode 100644 index 0000000000..88d42f0523 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_30_5a49629f0d45_.yml @@ -0,0 +1,754 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: 
+ data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: 
integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + 
db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + 
db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON 
public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow + USING btree (uuid) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_31_3d6190594e19_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_31_3d6190594e19_.yml new file mode 100644 index 0000000000..6100a1489c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_31_3d6190594e19_.yml @@ -0,0 +1,750 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + 
enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: 
jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: 
true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflow: + db_dbworkflow_uuid_key: + - uuid + db_dbworkflowdata: + 
db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX 
db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING 
btree (email)
+  db_dbworkflow:
+    db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow USING btree (id)
+    db_dbworkflow_uuid_key: CREATE UNIQUE INDEX db_dbworkflow_uuid_key ON public.db_dbworkflow USING btree (uuid)
+    ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow USING btree (label)
+  db_dbworkflowdata:
+    db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key ON public.db_dbworkflowdata USING btree (parent_id, name, data_type)
+    db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata USING btree (id)
+    ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id ON public.db_dbworkflowdata USING btree (aiida_obj_id)
+    ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON public.db_dbworkflowdata USING btree (parent_id)
+  db_dbworkflowstep:
+    db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key ON public.db_dbworkflowstep USING btree (parent_id, name)
+    db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep USING btree (id)
+  db_dbworkflowstep_calculations:
+    db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id)
+    db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey ON public.db_dbworkflowstep_calculations USING btree (id)
+  db_dbworkflowstep_sub_workflows:
+    db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id)
+    db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey ON public.db_dbworkflowstep_sub_workflows USING btree (id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_32_1b8ed3425af9_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_32_1b8ed3425af9_.yml
new file mode 100644
index 0000000000..59f99e9f87
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_32_1b8ed3425af9_.yml
@@ -0,0 +1,508 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    hostname:
data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + 
public: + data_type: boolean + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + 
db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + 
+    ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog USING btree (loggername)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING btree (label)
+    ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode USING btree (node_type)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode USING btree (process_type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser USING btree (email)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_33_1830c8430131_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_33_1830c8430131_.yml
new file mode 100644
index 0000000000..dddc513d9b
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_33_1830c8430131_.yml
@@ -0,0 +1,500 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    id:
data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + 
is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX 
db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + 
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser USING btree (email)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_34_de2eaf6978b4_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_34_de2eaf6978b4_.yml
new file mode 100644
index 0000000000..9bdac4d994
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_34_de2eaf6978b4_.yml
@@ -0,0 +1,475 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbcomment_id_seq'::regclass)
+      is_nullable: false
+    mtime:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbcomputer:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    hostname:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    id:
+      data_type: integer
+      default: nextval('db_dbcomputer_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    name:
+      data_type: character varying
+      default: null
+      is_nullable: false
+      max_length: 255
+    scheduler_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    transport_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    transport_type:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup:
+    description:
+      data_type: text
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_id_seq'::regclass)
+      is_nullable: false
+    label:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    time:
+      data_type: timestamp with time zone
+      default: null
+      is_nullable: true
+    type_string:
+      data_type: character varying
+      default: null
+      is_nullable: true
+      max_length: 255
+    user_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    uuid:
+      data_type: uuid
+      default: null
+      is_nullable: true
+  db_dbgroup_dbnodes:
+    dbgroup_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    dbnode_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbgroup_dbnodes_id_seq'::regclass)
+      is_nullable: false
+  db_dblink:
+    id:
+      data_type: integer
+      default: nextval('db_dblink_id_seq'::regclass)
+      is_nullable:
false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + 
db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + 
+    db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id)
+    db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx ON public.db_dbgroup_dbnodes USING btree (dbgroup_id)
+    db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx ON public.db_dbgroup_dbnodes USING btree (dbnode_id)
+    db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes USING btree (id)
+  db_dblink:
+    db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree (id)
+    ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink USING btree (input_id)
+    ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING btree (label)
+    ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink USING btree (output_id)
+    ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree (type)
+  db_dblog:
+    db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree (id)
+    db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING btree (uuid)
+    ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING btree (levelname)
+    ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog USING btree (loggername)
+  db_dbnode:
+    db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree (id)
+    db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode USING btree (uuid)
+    ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING btree (label)
+    ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode USING btree (node_type)
+    ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode USING btree (process_type)
+  db_dbsetting:
+    db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting USING btree (key)
+    db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting USING btree (id)
+    ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING btree (key)
+  db_dbuser:
+    db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree (id)
+    ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser USING btree (email)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_35_07fac78e6209_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_35_07fac78e6209_.yml
new file mode 100644
index 0000000000..53c6088492
--- /dev/null
+++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_35_07fac78e6209_.yml
@@ -0,0 +1,471 @@
+columns:
+  db_dbauthinfo:
+    aiidauser_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    auth_params:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+    dbcomputer_id:
+      data_type: integer
+      default: null
+      is_nullable: true
+    enabled:
+      data_type: boolean
+      default: null
+      is_nullable: true
+    id:
+      data_type: integer
+      default: nextval('db_dbauthinfo_id_seq'::regclass)
+      is_nullable: false
+    metadata:
+      data_type: jsonb
+      default: null
+      is_nullable: true
+  db_dbcomment:
+    content:
+      data_type: text
+      default: null
+      is_nullable: true
+    ctime:
+      data_type: timestamp with time zone
+      default: null
is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: 
character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) 
DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) 
+ db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_36_26d561acd560_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_36_26d561acd560_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_36_26d561acd560_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + 
data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + 
db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + 
db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_37_e797afa09270_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_37_e797afa09270_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_37_e797afa09270_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + 
data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: 
null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: 
FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: 
CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_38_e734dd5e50d7_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_38_e734dd5e50d7_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_38_e734dd5e50d7_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: 
null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + 
default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + 
db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX 
db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_39_7b38a9e783e7_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_39_7b38a9e783e7_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_39_7b38a9e783e7_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + 
max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + 
db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + 
db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_3_89176227b25_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_3_89176227b25_.yml new file mode 100644 index 0000000000..ba626cd6b8 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_3_89176227b25_.yml @@ -0,0 +1,765 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: 
timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + 
data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + 
is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + 
db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON 
public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON 
public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_40_91b573400be5_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_40_91b573400be5_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_40_91b573400be5_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: 
+ data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + 
db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON 
public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_41_118349c10896_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_41_118349c10896_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_41_118349c10896_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + 
default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + 
db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY 
DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + 
ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_42_bf591f31dd12_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_42_bf591f31dd12_.yml new file mode 100644 index 0000000000..53c6088492 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_42_bf591f31dd12_.yml @@ -0,0 +1,471 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character 
varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + 
default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING 
btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE 
INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_43_0edcdd5a30f0_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_43_0edcdd5a30f0_.yml new file mode 100644 index 0000000000..abfe65ec7a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_43_0edcdd5a30f0_.yml @@ -0,0 +1,475 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + 
dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: 
+ - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX 
db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_44_7536a82b2cc4_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_44_7536a82b2cc4_.yml new file mode 100644 index 0000000000..5f81aea07f --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_44_7536a82b2cc4_.yml @@ -0,0 +1,479 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + 
default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + 
default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) 
REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + 
ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_45_1feaea71bd5a_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_45_1feaea71bd5a_.yml new file mode 100644 index 0000000000..5f81aea07f --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_45_1feaea71bd5a_.yml @@ -0,0 +1,479 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true 
+ uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + 
db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX 
db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_46_535039300e4a_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_46_535039300e4a_.yml new file mode 100644 index 0000000000..fda1f11682 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_46_535039300e4a_.yml @@ -0,0 +1,479 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true 
+ auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + default: null + is_nullable: true + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone 
+ default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_label_key: + - label + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES 
db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_label_key: CREATE UNIQUE INDEX db_dbcomputer_label_key ON public.db_dbcomputer + USING btree (label) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE 
INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_47_34a831f4286d_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_47_34a831f4286d_.yml new file mode 100644 index 0000000000..fda1f11682 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_47_34a831f4286d_.yml @@ -0,0 +1,479 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + 
default: null + is_nullable: true + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: 
nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_label_key: + - label + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + 
db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_label_key: CREATE UNIQUE INDEX db_dbcomputer_label_key ON public.db_dbcomputer + USING btree (label) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX 
db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_48_1de112340b16_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_48_1de112340b16_.yml new file mode 100644 index 0000000000..fda1f11682 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_48_1de112340b16_.yml @@ -0,0 +1,479 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + default: null + is_nullable: true + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type_string: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + 
default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + node_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + 
db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_label_key: + - label + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_label_key: CREATE UNIQUE INDEX db_dbcomputer_label_key ON public.db_dbcomputer + USING btree (label) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) 
+ ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_49_1de112340b17_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_49_1de112340b17_.yml new file mode 100644 index 0000000000..7f6286fc51 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_49_1de112340b17_.yml @@ -0,0 +1,478 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + 
db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + default: null + is_nullable: false + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + 
data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + db_dbcomment_uuid_key: + - uuid + db_dbcomputer: + db_dbcomputer_label_key: + - label + db_dbcomputer_uuid_key: + - uuid + db_dbgroup: + db_dbgroup_label_type_string_key: + - label + - type_string + db_dbgroup_uuid_key: + - uuid + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: + - dbgroup_id + - dbnode_id + db_dblog: + db_dblog_uuid_key: + - uuid + db_dbnode: + db_dbnode_uuid_key: + - uuid + db_dbsetting: + db_dbsetting_key_key: + - key +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + 
db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + db_dblog_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomment_uuid_key: CREATE UNIQUE INDEX db_dbcomment_uuid_key ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_label_key: CREATE UNIQUE INDEX db_dbcomputer_label_key ON public.db_dbcomputer + USING btree (label) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbcomputer_uuid_key: CREATE UNIQUE INDEX db_dbcomputer_uuid_key ON public.db_dbcomputer + USING btree (uuid) + db_dbgroup: + db_dbgroup_label_type_string_key: CREATE UNIQUE INDEX db_dbgroup_label_type_string_key + ON public.db_dbgroup USING btree (label, type_string) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + db_dbgroup_uuid_key: CREATE UNIQUE INDEX db_dbgroup_uuid_key ON public.db_dbgroup + USING btree (uuid) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_key + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dbgroup_dbnodes_dbgroup_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + db_dbgroup_dbnodes_dbnode_id_idx: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_idx + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog 
USING btree + (id) + db_dblog_uuid_key: CREATE UNIQUE INDEX db_dblog_uuid_key ON public.db_dblog USING + btree (uuid) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + db_dbnode_uuid_key: CREATE UNIQUE INDEX db_dbnode_uuid_key ON public.db_dbnode + USING btree (uuid) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_4_35d4ee9a1b0e_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_4_35d4ee9a1b0e_.yml new file mode 100644 index 0000000000..ba626cd6b8 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_4_35d4ee9a1b0e_.yml @@ -0,0 +1,765 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + 
is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + 
is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + 
is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + 
db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + 
db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + 
db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_50_1de112340b18_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_50_1de112340b18_.yml new file mode 100644 index 0000000000..30b7e96182 --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_50_1de112340b18_.yml @@ -0,0 +1,523 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: false + auth_params: + data_type: jsonb + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: false + enabled: + data_type: boolean + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + db_dbcomment: + content: + data_type: text + default: null + is_nullable: false + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: false + hostname: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + metadata: + data_type: jsonb + default: null + is_nullable: false + scheduler_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + transport_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup: + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + type_string: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: false + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + 
is_nullable: false + type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + db_dblog: + dbnode_id: + data_type: integer + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: false + max_length: 50 + loggername: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + message: + data_type: text + default: null + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: false + time: + data_type: timestamp with time zone + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: false + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: false + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: false + node_type: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + repository_metadata: + data_type: jsonb + default: null + is_nullable: false + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: false + db_dbsetting: + description: + data_type: text + default: null + is_nullable: false + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 1024 + time: + data_type: timestamp with time zone + default: null + is_nullable: false + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + email: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: false + max_length: 254 + last_name: + data_type: character varying + default: null + is_nullable: false + max_length: 254 +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + unique: + db_dbauthinfo: + uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: + - aiidauser_id + - dbcomputer_id + db_dbcomment: + uq_db_dbcomment_uuid: + - uuid + db_dbcomputer: + uq_db_dbcomputer_label: + - label + uq_db_dbcomputer_uuid: + - uuid + db_dbgroup: + uq_db_dbgroup_label_type_string: + - label + - type_string + 
uq_db_dbgroup_uuid: + - uuid + db_dbgroup_dbnodes: + uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: + - dbgroup_id + - dbnode_id + db_dblog: + uq_db_dblog_uuid: + - uuid + db_dbnode: + uq_db_dbnode_uuid: + - uuid + db_dbsetting: + uq_db_dbsetting_key: + - key + db_dbuser: + uq_db_dbuser_email: + - email +foreign_keys: + db_dbauthinfo: + fk_db_dbauthinfo_aiidauser_id_db_dbuser: FOREIGN KEY (aiidauser_id) REFERENCES + db_dbuser(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + fk_db_dbauthinfo_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES + db_dbcomputer(id) ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + fk_db_dbcomment_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + fk_db_dbcomment_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + fk_db_dbgroup_dbnodes_dbgroup_id_db_dbgroup: FOREIGN KEY (dbgroup_id) REFERENCES + db_dbgroup(id) DEFERRABLE INITIALLY DEFERRED + fk_db_dbgroup_dbnodes_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) DEFERRABLE INITIALLY DEFERRED + db_dblink: + fk_db_dblink_input_id_db_dbnode: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + fk_db_dblink_output_id_db_dbnode: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dblog: + fk_db_dblog_dbnode_id_db_dbnode: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + fk_db_dbnode_dbcomputer_id_db_dbcomputer: FOREIGN KEY (dbcomputer_id) REFERENCES + db_dbcomputer(id) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + fk_db_dbnode_user_id_db_dbuser: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED +indexes: + db_dbauthinfo: + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + ix_db_dbauthinfo_aiidauser_id: CREATE INDEX ix_db_dbauthinfo_aiidauser_id ON public.db_dbauthinfo + USING btree (aiidauser_id) + ix_db_dbauthinfo_dbcomputer_id: CREATE INDEX ix_db_dbauthinfo_dbcomputer_id ON + public.db_dbauthinfo USING btree (dbcomputer_id) + uq_db_dbauthinfo_aiidauser_id_dbcomputer_id: CREATE UNIQUE INDEX uq_db_dbauthinfo_aiidauser_id_dbcomputer_id + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + ix_db_dbcomment_dbnode_id: CREATE INDEX ix_db_dbcomment_dbnode_id ON public.db_dbcomment + USING btree (dbnode_id) + ix_db_dbcomment_user_id: CREATE INDEX ix_db_dbcomment_user_id ON public.db_dbcomment + USING btree (user_id) + uq_db_dbcomment_uuid: CREATE UNIQUE INDEX uq_db_dbcomment_uuid ON public.db_dbcomment + USING btree (uuid) + db_dbcomputer: + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + ix_pat_db_dbcomputer_label: CREATE INDEX ix_pat_db_dbcomputer_label ON public.db_dbcomputer + USING btree (label varchar_pattern_ops) + uq_db_dbcomputer_label: CREATE UNIQUE INDEX uq_db_dbcomputer_label ON public.db_dbcomputer + USING btree (label) + uq_db_dbcomputer_uuid: CREATE UNIQUE INDEX uq_db_dbcomputer_uuid ON public.db_dbcomputer + USING btree 
(uuid) + db_dbgroup: + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_label: CREATE INDEX ix_db_dbgroup_label ON public.db_dbgroup USING + btree (label) + ix_db_dbgroup_type_string: CREATE INDEX ix_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string) + ix_db_dbgroup_user_id: CREATE INDEX ix_db_dbgroup_user_id ON public.db_dbgroup + USING btree (user_id) + ix_pat_db_dbgroup_label: CREATE INDEX ix_pat_db_dbgroup_label ON public.db_dbgroup + USING btree (label varchar_pattern_ops) + ix_pat_db_dbgroup_type_string: CREATE INDEX ix_pat_db_dbgroup_type_string ON public.db_dbgroup + USING btree (type_string varchar_pattern_ops) + uq_db_dbgroup_label_type_string: CREATE UNIQUE INDEX uq_db_dbgroup_label_type_string + ON public.db_dbgroup USING btree (label, type_string) + uq_db_dbgroup_uuid: CREATE UNIQUE INDEX uq_db_dbgroup_uuid ON public.db_dbgroup + USING btree (uuid) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + ix_db_dbgroup_dbnodes_dbgroup_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbgroup_id + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) + ix_db_dbgroup_dbnodes_dbnode_id: CREATE INDEX ix_db_dbgroup_dbnodes_dbnode_id + ON public.db_dbgroup_dbnodes USING btree (dbnode_id) + uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id: CREATE UNIQUE INDEX uq_db_dbgroup_dbnodes_dbgroup_id_dbnode_id + ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_input_id: CREATE INDEX ix_db_dblink_input_id ON public.db_dblink + USING btree (input_id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + ix_db_dblink_output_id: CREATE INDEX ix_db_dblink_output_id ON public.db_dblink + USING btree (output_id) + ix_db_dblink_type: CREATE INDEX ix_db_dblink_type ON public.db_dblink USING btree + (type) + ix_pat_db_dblink_label: CREATE INDEX ix_pat_db_dblink_label ON public.db_dblink + USING btree (label varchar_pattern_ops) + ix_pat_db_dblink_type: CREATE INDEX ix_pat_db_dblink_type ON public.db_dblink + USING btree (type varchar_pattern_ops) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_dbnode_id: CREATE INDEX ix_db_dblog_dbnode_id ON public.db_dblog USING + btree (dbnode_id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_pat_db_dblog_levelname: CREATE INDEX ix_pat_db_dblog_levelname ON public.db_dblog + USING btree (levelname varchar_pattern_ops) + ix_pat_db_dblog_loggername: CREATE INDEX ix_pat_db_dblog_loggername ON public.db_dblog + USING btree (loggername varchar_pattern_ops) + uq_db_dblog_uuid: CREATE UNIQUE INDEX uq_db_dblog_uuid ON public.db_dblog USING + btree (uuid) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_ctime: CREATE INDEX ix_db_dbnode_ctime ON public.db_dbnode USING + btree (ctime) + ix_db_dbnode_dbcomputer_id: CREATE INDEX ix_db_dbnode_dbcomputer_id ON public.db_dbnode + USING btree (dbcomputer_id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_mtime: CREATE INDEX ix_db_dbnode_mtime 
ON public.db_dbnode USING + btree (mtime) + ix_db_dbnode_node_type: CREATE INDEX ix_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_user_id: CREATE INDEX ix_db_dbnode_user_id ON public.db_dbnode USING + btree (user_id) + ix_pat_db_dbnode_label: CREATE INDEX ix_pat_db_dbnode_label ON public.db_dbnode + USING btree (label varchar_pattern_ops) + ix_pat_db_dbnode_node_type: CREATE INDEX ix_pat_db_dbnode_node_type ON public.db_dbnode + USING btree (node_type varchar_pattern_ops) + ix_pat_db_dbnode_process_type: CREATE INDEX ix_pat_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type varchar_pattern_ops) + uq_db_dbnode_uuid: CREATE UNIQUE INDEX uq_db_dbnode_uuid ON public.db_dbnode USING + btree (uuid) + db_dbsetting: + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_pat_db_dbsetting_key: CREATE INDEX ix_pat_db_dbsetting_key ON public.db_dbsetting + USING btree (key varchar_pattern_ops) + uq_db_dbsetting_key: CREATE UNIQUE INDEX uq_db_dbsetting_key ON public.db_dbsetting + USING btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_pat_db_dbuser_email: CREATE INDEX ix_pat_db_dbuser_email ON public.db_dbuser + USING btree (email varchar_pattern_ops) + uq_db_dbuser_email: CREATE UNIQUE INDEX uq_db_dbuser_email ON public.db_dbuser + USING btree (email) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_5_7a6587e16f4c_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_5_7a6587e16f4c_.yml new file mode 100644 index 0000000000..5a450f748a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_5_7a6587e16f4c_.yml @@ -0,0 +1,771 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + 
max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + 
default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true 
+ time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + uix_dbnode_id_dbgroup_id: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES 
db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree 
(id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + uix_dbnode_id_dbgroup_id: CREATE UNIQUE INDEX uix_dbnode_id_dbgroup_id ON public.db_dbgroup_dbnodes + USING btree (dbnode_id, dbgroup_id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX 
db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_6_0aebbeab274d_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_6_0aebbeab274d_.yml new file mode 100644 index 0000000000..5a450f748a --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_6_0aebbeab274d_.yml @@ -0,0 +1,771 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true 
+ max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + 
first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: 
nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + uix_dbnode_id_dbgroup_id: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY 
(dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + uix_dbnode_id_dbgroup_id: CREATE UNIQUE INDEX uix_dbnode_id_dbgroup_id ON public.db_dbgroup_dbnodes + USING btree (dbnode_id, dbgroup_id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX 
ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_7_6c629c886f84_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_7_6c629c886f84_.yml new file mode 100644 index 0000000000..bd8018591c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_7_6c629c886f84_.yml @@ -0,0 +1,778 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + 
dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 
+ owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + 
is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + 
db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + uix_dbnode_id_dbgroup_id: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES 
db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + uix_dbnode_id_dbgroup_id: CREATE UNIQUE INDEX uix_dbnode_id_dbgroup_id ON public.db_dbgroup_dbnodes + USING btree (dbnode_id, dbgroup_id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key 
ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_8_f9a69de76a9a_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_8_f9a69de76a9a_.yml new file mode 100644 index 0000000000..bd8018591c --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_8_f9a69de76a9a_.yml @@ -0,0 +1,778 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true 
+ dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblock: + creation: + data_type: timestamp with time zone + default: null + is_nullable: true + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + owner: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + timeout: + data_type: integer + default: null + is_nullable: true + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: 
true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character 
varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblock: + db_dblock_pkey: + - key + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + uix_dbnode_id_dbgroup_id: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + 
db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate 
+ USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + uix_dbnode_id_dbgroup_id: CREATE UNIQUE INDEX uix_dbnode_id_dbgroup_id ON public.db_dbgroup_dbnodes + USING btree (dbnode_id, dbgroup_id) + db_dblink: + db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblock: + db_dblock_pkey: CREATE UNIQUE INDEX db_dblock_pkey ON public.db_dblock USING btree + (key) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX 
ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree (id) diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_9_a514d673c163_.yml b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_9_a514d673c163_.yml new file mode 100644 index 0000000000..3f1971f7ee --- /dev/null +++ b/tests/backends/aiida_sqlalchemy/migrations/test_all_schema/test_sqla_9_a514d673c163_.yml @@ -0,0 +1,753 @@ +columns: + db_dbauthinfo: + aiidauser_id: + data_type: integer + default: null + is_nullable: true + auth_params: + data_type: jsonb + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbauthinfo_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + db_dbcalcstate: + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcalcstate_id_seq'::regclass) + is_nullable: false + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbcomment: + content: + data_type: text + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbcomment_id_seq'::regclass) + is_nullable: false + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbcomputer: + description: + data_type: text + default: null + is_nullable: true + enabled: + data_type: boolean + default: null + is_nullable: true + hostname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbcomputer_id_seq'::regclass) + is_nullable: false + metadata: + data_type: jsonb + default: null + is_nullable: true + name: + data_type: character varying + default: 
null + is_nullable: false + max_length: 255 + scheduler_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + transport_params: + data_type: jsonb + default: null + is_nullable: true + transport_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup: + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbgroup_dbnodes: + dbgroup_id: + data_type: integer + default: null + is_nullable: true + dbnode_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) + is_nullable: false + db_dblink: + id: + data_type: integer + default: nextval('db_dblink_id_seq'::regclass) + is_nullable: false + input_id: + data_type: integer + default: null + is_nullable: true + label: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + output_id: + data_type: integer + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dblog: + id: + data_type: integer + default: nextval('db_dblog_id_seq'::regclass) + is_nullable: false + levelname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + loggername: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + message: + data_type: text + default: null + is_nullable: true + metadata: + data_type: jsonb + default: null + is_nullable: true + objname: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + objpk: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + db_dbnode: + attributes: + data_type: jsonb + default: null + is_nullable: true + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + dbcomputer_id: + data_type: integer + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + extras: + data_type: jsonb + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbnode_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + process_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + public: + data_type: boolean + default: null + is_nullable: true + type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: false + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbsetting: + description: + data_type: character varying + default: null + 
is_nullable: false + max_length: 255 + id: + data_type: integer + default: nextval('db_dbsetting_id_seq'::regclass) + is_nullable: false + key: + data_type: character varying + default: null + is_nullable: false + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + val: + data_type: jsonb + default: null + is_nullable: true + db_dbuser: + date_joined: + data_type: timestamp with time zone + default: null + is_nullable: true + email: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + first_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + id: + data_type: integer + default: nextval('db_dbuser_id_seq'::regclass) + is_nullable: false + institution: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + is_active: + data_type: boolean + default: null + is_nullable: true + is_staff: + data_type: boolean + default: null + is_nullable: true + is_superuser: + data_type: boolean + default: null + is_nullable: false + last_login: + data_type: timestamp with time zone + default: null + is_nullable: true + last_name: + data_type: character varying + default: null + is_nullable: true + max_length: 254 + password: + data_type: character varying + default: null + is_nullable: true + max_length: 128 + db_dbworkflow: + ctime: + data_type: timestamp with time zone + default: null + is_nullable: true + description: + data_type: text + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflow_id_seq'::regclass) + is_nullable: false + label: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + lastsyncedversion: + data_type: integer + default: null + is_nullable: true + module: + data_type: text + default: null + is_nullable: true + module_class: + data_type: text + default: null + is_nullable: true + mtime: + data_type: timestamp with time zone + default: null + is_nullable: true + nodeversion: + data_type: integer + default: null + is_nullable: true + report: + data_type: text + default: null + is_nullable: true + script_md5: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + script_path: + data_type: text + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + user_id: + data_type: integer + default: null + is_nullable: true + uuid: + data_type: uuid + default: null + is_nullable: true + db_dbworkflowdata: + aiida_obj_id: + data_type: integer + default: null + is_nullable: true + data_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + id: + data_type: integer + default: nextval('db_dbworkflowdata_id_seq'::regclass) + is_nullable: false + json_value: + data_type: text + default: null + is_nullable: true + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + time: + data_type: timestamp with time zone + default: null + is_nullable: true + value_type: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + db_dbworkflowstep: + id: + data_type: integer + default: nextval('db_dbworkflowstep_id_seq'::regclass) + is_nullable: false + name: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + nextcall: + data_type: character varying + default: null + is_nullable: 
true + max_length: 255 + parent_id: + data_type: integer + default: null + is_nullable: true + state: + data_type: character varying + default: null + is_nullable: true + max_length: 255 + time: + data_type: timestamp with time zone + default: null + is_nullable: true + user_id: + data_type: integer + default: null + is_nullable: true + db_dbworkflowstep_calculations: + dbnode_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_calculations_id_seq'::regclass) + is_nullable: false + db_dbworkflowstep_sub_workflows: + dbworkflow_id: + data_type: integer + default: null + is_nullable: true + dbworkflowstep_id: + data_type: integer + default: null + is_nullable: true + id: + data_type: integer + default: nextval('db_dbworkflowstep_sub_workflows_id_seq'::regclass) + is_nullable: false +constraints: + primary_key: + db_dbauthinfo: + db_dbauthinfo_pkey: + - id + db_dbcalcstate: + db_dbcalcstate_pkey: + - id + db_dbcomment: + db_dbcomment_pkey: + - id + db_dbcomputer: + db_dbcomputer_pkey: + - id + db_dbgroup: + db_dbgroup_pkey: + - id + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: + - id + db_dblink: + db_dblink_pkey: + - id + db_dblog: + db_dblog_pkey: + - id + db_dbnode: + db_dbnode_pkey: + - id + db_dbsetting: + db_dbsetting_pkey: + - id + db_dbuser: + db_dbuser_pkey: + - id + db_dbworkflow: + db_dbworkflow_pkey: + - id + db_dbworkflowdata: + db_dbworkflowdata_pkey: + - id + db_dbworkflowstep: + db_dbworkflowstep_pkey: + - id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_pkey: + - id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_pkey: + - id + unique: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: + - aiidauser_id + - dbcomputer_id + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: + - dbnode_id + - state + db_dbcomputer: + db_dbcomputer_name_key: + - name + db_dbgroup: + db_dbgroup_name_type_key: + - name + - type + db_dbgroup_dbnodes: + uix_dbnode_id_dbgroup_id: + - dbgroup_id + - dbnode_id + db_dbsetting: + db_dbsetting_key_key: + - key + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: + - data_type + - name + - parent_id + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: + - name + - parent_id + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: + - dbnode_id + - dbworkflowstep_id + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: + - dbworkflow_id + - dbworkflowstep_id +foreign_keys: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_fkey: FOREIGN KEY (aiidauser_id) REFERENCES db_dbuser(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbauthinfo_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcalcstate: + db_dbcalcstate_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment: + db_dbcomment_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbcomment_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup: + db_dbgroup_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes: + 
db_dbgroup_dbnodes_dbgroup_id_fkey: FOREIGN KEY (dbgroup_id) REFERENCES db_dbgroup(id) + DEFERRABLE INITIALLY DEFERRED + db_dbgroup_dbnodes_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES db_dbnode(id) + DEFERRABLE INITIALLY DEFERRED + db_dblink: + db_dblink_input_id_fkey: FOREIGN KEY (input_id) REFERENCES db_dbnode(id) DEFERRABLE + INITIALLY DEFERRED + db_dblink_output_id_fkey: FOREIGN KEY (output_id) REFERENCES db_dbnode(id) ON + DELETE CASCADE DEFERRABLE INITIALLY DEFERRED + db_dbnode: + db_dbnode_dbcomputer_id_fkey: FOREIGN KEY (dbcomputer_id) REFERENCES db_dbcomputer(id) + ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbnode_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) ON DELETE + RESTRICT DEFERRABLE INITIALLY DEFERRED + db_dbworkflow: + db_dbworkflow_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowdata: + db_dbworkflowdata_aiida_obj_id_fkey: FOREIGN KEY (aiida_obj_id) REFERENCES db_dbnode(id) + db_dbworkflowdata_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_fkey: FOREIGN KEY (parent_id) REFERENCES db_dbworkflow(id) + db_dbworkflowstep_user_id_fkey: FOREIGN KEY (user_id) REFERENCES db_dbuser(id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_dbnode_id_fkey: FOREIGN KEY (dbnode_id) REFERENCES + db_dbnode(id) + db_dbworkflowstep_calculations_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_dbworkflow_id_fkey: FOREIGN KEY (dbworkflow_id) + REFERENCES db_dbworkflow(id) + db_dbworkflowstep_sub_workflows_dbworkflowstep_id_fkey: FOREIGN KEY (dbworkflowstep_id) + REFERENCES db_dbworkflowstep(id) +indexes: + db_dbauthinfo: + db_dbauthinfo_aiidauser_id_dbcomputer_id_key: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_key + ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) + db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo + USING btree (id) + db_dbcalcstate: + db_dbcalcstate_dbnode_id_state_key: CREATE UNIQUE INDEX db_dbcalcstate_dbnode_id_state_key + ON public.db_dbcalcstate USING btree (dbnode_id, state) + db_dbcalcstate_pkey: CREATE UNIQUE INDEX db_dbcalcstate_pkey ON public.db_dbcalcstate + USING btree (id) + ix_db_dbcalcstate_state: CREATE INDEX ix_db_dbcalcstate_state ON public.db_dbcalcstate + USING btree (state) + db_dbcomment: + db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment + USING btree (id) + db_dbcomputer: + db_dbcomputer_name_key: CREATE UNIQUE INDEX db_dbcomputer_name_key ON public.db_dbcomputer + USING btree (name) + db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer + USING btree (id) + db_dbgroup: + db_dbgroup_name_type_key: CREATE UNIQUE INDEX db_dbgroup_name_type_key ON public.db_dbgroup + USING btree (name, type) + db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING + btree (id) + ix_db_dbgroup_name: CREATE INDEX ix_db_dbgroup_name ON public.db_dbgroup USING + btree (name) + ix_db_dbgroup_type: CREATE INDEX ix_db_dbgroup_type ON public.db_dbgroup USING + btree (type) + db_dbgroup_dbnodes: + db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes + USING btree (id) + uix_dbnode_id_dbgroup_id: CREATE UNIQUE INDEX uix_dbnode_id_dbgroup_id ON public.db_dbgroup_dbnodes + USING btree (dbnode_id, dbgroup_id) + db_dblink: + 
db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree + (id) + ix_db_dblink_label: CREATE INDEX ix_db_dblink_label ON public.db_dblink USING + btree (label) + db_dblog: + db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree + (id) + ix_db_dblog_levelname: CREATE INDEX ix_db_dblog_levelname ON public.db_dblog USING + btree (levelname) + ix_db_dblog_loggername: CREATE INDEX ix_db_dblog_loggername ON public.db_dblog + USING btree (loggername) + ix_db_dblog_objname: CREATE INDEX ix_db_dblog_objname ON public.db_dblog USING + btree (objname) + ix_db_dblog_objpk: CREATE INDEX ix_db_dblog_objpk ON public.db_dblog USING btree + (objpk) + db_dbnode: + db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree + (id) + ix_db_dbnode_label: CREATE INDEX ix_db_dbnode_label ON public.db_dbnode USING + btree (label) + ix_db_dbnode_process_type: CREATE INDEX ix_db_dbnode_process_type ON public.db_dbnode + USING btree (process_type) + ix_db_dbnode_type: CREATE INDEX ix_db_dbnode_type ON public.db_dbnode USING btree + (type) + db_dbsetting: + db_dbsetting_key_key: CREATE UNIQUE INDEX db_dbsetting_key_key ON public.db_dbsetting + USING btree (key) + db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting + USING btree (id) + ix_db_dbsetting_key: CREATE INDEX ix_db_dbsetting_key ON public.db_dbsetting USING + btree (key) + db_dbuser: + db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree + (id) + ix_db_dbuser_email: CREATE UNIQUE INDEX ix_db_dbuser_email ON public.db_dbuser + USING btree (email) + db_dbworkflow: + db_dbworkflow_pkey: CREATE UNIQUE INDEX db_dbworkflow_pkey ON public.db_dbworkflow + USING btree (id) + ix_db_dbworkflow_label: CREATE INDEX ix_db_dbworkflow_label ON public.db_dbworkflow + USING btree (label) + db_dbworkflowdata: + db_dbworkflowdata_parent_id_name_data_type_key: CREATE UNIQUE INDEX db_dbworkflowdata_parent_id_name_data_type_key + ON public.db_dbworkflowdata USING btree (parent_id, name, data_type) + db_dbworkflowdata_pkey: CREATE UNIQUE INDEX db_dbworkflowdata_pkey ON public.db_dbworkflowdata + USING btree (id) + ix_db_dbworkflowdata_aiida_obj_id: CREATE INDEX ix_db_dbworkflowdata_aiida_obj_id + ON public.db_dbworkflowdata USING btree (aiida_obj_id) + ix_db_dbworkflowdata_parent_id: CREATE INDEX ix_db_dbworkflowdata_parent_id ON + public.db_dbworkflowdata USING btree (parent_id) + db_dbworkflowstep: + db_dbworkflowstep_parent_id_name_key: CREATE UNIQUE INDEX db_dbworkflowstep_parent_id_name_key + ON public.db_dbworkflowstep USING btree (parent_id, name) + db_dbworkflowstep_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_pkey ON public.db_dbworkflowstep + USING btree (id) + db_dbworkflowstep_calculations: + db_dbworkflowstep_calculations_id_dbnode_id_key: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_id_dbnode_id_key + ON public.db_dbworkflowstep_calculations USING btree (dbworkflowstep_id, dbnode_id) + db_dbworkflowstep_calculations_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_calculations_pkey + ON public.db_dbworkflowstep_calculations USING btree (id) + db_dbworkflowstep_sub_workflows: + db_dbworkflowstep_sub_workflows_id_dbworkflow__key: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_id_dbworkflow__key + ON public.db_dbworkflowstep_sub_workflows USING btree (dbworkflowstep_id, dbworkflow_id) + db_dbworkflowstep_sub_workflows_pkey: CREATE UNIQUE INDEX db_dbworkflowstep_sub_workflows_pkey + ON public.db_dbworkflowstep_sub_workflows USING btree 
(id)
diff --git a/tests/backends/aiida_sqlalchemy/migrations/test_schemas.py b/tests/backends/aiida_sqlalchemy/migrations/test_schemas.py
deleted file mode 100644
index 6e61010e2c..0000000000
--- a/tests/backends/aiida_sqlalchemy/migrations/test_schemas.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
-# For further information on the license, see the LICENSE.txt file        #
-# For further information please visit http://www.aiida.net               #
-###########################################################################
-"""Check that the schema that results from a migration is the same generated by the models.
-
-This is important since migrations are frequently written by hand or extended manually,
-and we have to ensure that the final result is what is conceived in the SQLA models.
-"""
-import os
-
-from alembic import command
-from alembic.config import Config
-import pytest
-from sqlalchemy.engine import create_engine
-from sqlalchemydiff import compare
-
-from aiida.backends.sqlalchemy import manager
-from aiida.backends.sqlalchemy.models.base import Base
-
-from ..test_utils import destroy_database, new_database
-
-
-class TestMigrationSchemaVsModelsSchema:
-    """Class to perform test."""
-
-    @pytest.fixture(autouse=True)
-    def init_db(self, clear_database_before_test, backend):  # pylint: disable=unused-argument
-        """Initialise the databases"""
-        # pylint: disable=attribute-defined-outside-init
-        from sqlalchemydiff.util import get_temporary_uri
-
-        from aiida.backends.sqlalchemy.migrations import versions
-
-        # The path to the folder that contains the migration configuration (the
-        # actual configuration - not the testing)
-        migr_method_dir_path = os.path.dirname(os.path.realpath(manager.__file__))
-        # Set the alembic script directory location
-        alembic_dpath = os.path.join(migr_method_dir_path, manager.ALEMBIC_REL_PATH)  # pylint: disable=no-member
-
-        # Constructing the versions directory
-        versions_dpath = os.path.join(os.path.dirname(versions.__file__))
-
-        # Setting dynamically the the path to the alembic configuration
-        # (this is where the env.py file can be found)
-        self.alembic_cfg_left = Config()
-        self.alembic_cfg_left.set_main_option('script_location', alembic_dpath)
-        # Setting dynamically the versions directory. These are the
-        # migration scripts to pass from one version to the other. The
-        # default ones are overridden with test-specific migrations.
-        self.alembic_cfg_left.set_main_option('version_locations', versions_dpath)
-
-        # The correction URL to the SQLA database of the current
-        # AiiDA connection
-        curr_db_url = backend.get_session().bind.url
-
-        # Create new urls for the two new databases
-        self.db_url_left = get_temporary_uri(str(curr_db_url))
-        self.db_url_right = get_temporary_uri(str(curr_db_url))
-
-        # Put the correct database url to the database used by alembic
-        self.alembic_cfg_left.set_main_option('sqlalchemy.url', self.db_url_left)
-
-        # Database creation
-        new_database(self.db_url_left)
-        new_database(self.db_url_right)
-
-        yield
-
-        destroy_database(self.db_url_left)
-        destroy_database(self.db_url_right)
-
-    def test_model_and_migration_schemas_are_the_same(self):  # pylint: disable=invalid-name
-        """Compare two databases.
-
-        Compares the database obtained with all migrations against the
-        one we get out of the models. It produces a text file with the
-        results to help debug differences.
-        """
-        with create_engine(self.db_url_left).begin() as connection:
-            self.alembic_cfg_left.attributes['connection'] = connection  # pylint: disable=unsupported-assignment-operation
-            command.upgrade(self.alembic_cfg_left, 'head')
-
-        engine_right = create_engine(self.db_url_right)
-        Base.metadata.create_all(engine_right)
-        engine_right.dispose()
-
-        result = compare(self.db_url_left, self.db_url_right, set(['alembic_version']))
-
-        assert result.is_match, (
-            "The migration database doesn't match to the one "  # pylint: disable=protected-access
-            f'created by the models.\nDifferences: {result._dump_data(result.errors)}'
-        )
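The file deleted just above, test_schemas.py, verified every migration against the SQLAlchemy models with sqlalchemydiff; the YAML files added earlier in this diff snapshot the schema of each revision instead. As a rough sketch of how such a snapshot can be produced, the helper below reflects a live database with SQLAlchemy's runtime inspector. The function name, the subset of keys captured, and the connection URL are illustrative assumptions, not the generator used by aiida-core.

```python
# Sketch: reflect a PostgreSQL schema into a dict shaped like the YAML
# fixtures above (columns, primary-key constraints, indexes). Assumes a
# reachable database; all names here are placeholders for illustration.
import yaml
from sqlalchemy import create_engine, inspect


def snapshot_schema(db_url):
    """Reflect column, primary-key and index information for every table."""
    engine = create_engine(db_url)
    inspector = inspect(engine)
    snapshot = {'columns': {}, 'constraints': {'primary_key': {}}, 'indexes': {}}
    for table in inspector.get_table_names():
        # Column name -> type and nullability, mirroring the fixture layout.
        snapshot['columns'][table] = {
            column['name']: {
                'data_type': str(column['type']),
                'is_nullable': column['nullable'],
            } for column in inspector.get_columns(table)
        }
        # Primary-key constraint name -> list of constrained columns.
        primary_key = inspector.get_pk_constraint(table)
        snapshot['constraints']['primary_key'][table] = {
            primary_key['name']: primary_key['constrained_columns']
        }
        # Index name -> indexed columns.
        snapshot['indexes'][table] = {
            index['name']: index['column_names'] for index in inspector.get_indexes(table)
        }
    engine.dispose()
    return snapshot


if __name__ == '__main__':
    # Placeholder URL matching the test profile configured in this PR.
    print(yaml.safe_dump(snapshot_schema('postgresql://postgres@localhost:5432/test_aiida')))
```

A snapshot produced this way can be diffed textually against a stored fixture, which is what makes the per-revision YAML files a workable replacement for the deleted sqlalchemydiff-based check.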
diff --git a/tests/backends/aiida_sqlalchemy/test_nodes.py b/tests/backends/aiida_sqlalchemy/test_nodes.py
index 349a96c289..d5e9a43d3d 100644
--- a/tests/backends/aiida_sqlalchemy/test_nodes.py
+++ b/tests/backends/aiida_sqlalchemy/test_nodes.py
@@ -18,40 +18,8 @@ class TestNodeBasicSQLA(AiidaTestCase):
     """These tests check the basic features of nodes(setting of attributes, copying of files, ...)."""

-    def test_settings(self):
-        """Test the settings table (similar to Attributes, but without the key."""
-        from aiida.backends.sqlalchemy import get_scoped_session
-        from aiida.backends.sqlalchemy.models.settings import DbSetting
-        session = get_scoped_session()
-
-        from pytz import UTC
-        from sqlalchemy.exc import IntegrityError
-
-        from aiida.common import timezone
-
-        DbSetting.set_value(key='pippo', value=[1, 2, 3])
-
-        # s_1 = DbSetting.objects.get(key='pippo')
-        s_1 = DbSetting.query.filter_by(key='pippo').first()  # pylint: disable=no-member
-
-        self.assertEqual(s_1.getvalue(), [1, 2, 3])
-
-        s_2 = DbSetting(key='pippo')
-        s_2.time = timezone.datetime.now(tz=UTC)
-        with self.assertRaises(IntegrityError):
-            with session.begin_nested():
-                # same name...
-                session.add(s_2)
-
-        # Should replace pippo
-        DbSetting.set_value(key='pippo', value='a')
-        s_1 = DbSetting.query.filter_by(key='pippo').first()  # pylint: disable=no-member
-
-        self.assertEqual(s_1.getvalue(), 'a')
-
     def test_load_nodes(self):
         """Test for load_node() function."""
-        from aiida.backends.sqlalchemy import get_scoped_session
         from aiida.orm import load_node

         a_obj = Data()
@@ -62,7 +30,7 @@ def test_load_nodes(self):
         self.assertEqual(a_obj.pk, load_node(pk=a_obj.pk).pk)
         self.assertEqual(a_obj.pk, load_node(uuid=a_obj.uuid).pk)

-        session = get_scoped_session()
+        session = self.backend.get_session()

         try:
             session.begin_nested()
@@ -105,19 +73,16 @@ def test_multiple_node_creation(self):
         (and subsequently committed) when a user is in the session.
         It tests the fix for the issue #234
         """
-        import aiida.backends.sqlalchemy
         from aiida.backends.sqlalchemy.models.node import DbNode
         from aiida.common.utils import get_new_uuid

-        backend = self.backend
-
         # Get the automatic user
-        dbuser = backend.users.create(f'{self.id()}@aiida.net').store().dbmodel
+        dbuser = self.backend.users.create(f'{self.id()}@aiida.net').store().dbmodel

         # Create a new node but don't add it to the session
         node_uuid = get_new_uuid()
         DbNode(user=dbuser, uuid=node_uuid, node_type=None)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = self.backend.get_session()

         # Query the session before commit
         res = session.query(DbNode.uuid).filter(DbNode.uuid == node_uuid).all()
diff --git a/tests/backends/aiida_sqlalchemy/test_schema.py b/tests/backends/aiida_sqlalchemy/test_schema.py
index 92f35e8a95..69724b1d1c 100644
--- a/tests/backends/aiida_sqlalchemy/test_schema.py
+++ b/tests/backends/aiida_sqlalchemy/test_schema.py
@@ -13,12 +13,12 @@

 from sqlalchemy import exc as sa_exc

-import aiida
 from aiida.backends.sqlalchemy.models.node import DbNode
 from aiida.backends.sqlalchemy.models.user import DbUser
 from aiida.backends.testbase import AiidaTestCase
 from aiida.common.links import LinkType
 from aiida.common.utils import get_new_uuid
+from aiida.manage import get_manager
 from aiida.orm import CalculationNode, Data

@@ -98,7 +98,7 @@ def test_user_node_1(self):
         self.assertIsNone(dbu1.id)
         self.assertIsNone(dbn_1.id)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = get_manager().get_profile_storage().get_session()
         # Add only the node and commit
         session.add(dbn_1)
         session.commit()
@@ -124,7 +124,7 @@ def test_user_node_2(self):
         self.assertIsNone(dbu1.id)
         self.assertIsNone(dbn_1.id)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = get_manager().get_profile_storage().get_session()

         # Catch all the SQLAlchemy warnings generated by the following code
         with warnings.catch_warnings():  # pylint: disable=no-member
@@ -159,7 +159,7 @@ def test_user_node_3(self):
         self.assertIsNone(dbn_1.id)
         self.assertIsNone(dbn_2.id)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = get_manager().get_profile_storage().get_session()

         # Add only first node and commit
         session.add(dbn_1)
@@ -198,7 +198,7 @@ def test_user_node_4(self):
         self.assertIsNone(dbu1.id)
         self.assertIsNone(dbn_1.id)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = get_manager().get_profile_storage().get_session()

         # Add only first node and commit
         session.add(dbn_1)
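The hunks above and below all converge on a single route to the SQLAlchemy session. Shown in isolation, and assuming an AiiDA profile is already loaded, the new access pattern is roughly the following sketch; the comments restate the replacements that the diff itself makes.

```python
# Sketch of the storage-access pattern these tests migrate to; assumes a
# loaded AiiDA profile. Names follow the replacements visible in the hunks.
from aiida.manage import get_manager

storage = get_manager().get_profile_storage()  # formerly get_manager().get_backend()
session = storage.get_session()  # formerly aiida.backends.sqlalchemy.get_scoped_session()

# The returned object is a plain SQLAlchemy session, so the usual
# unit-of-work calls (add / commit / rollback) apply unchanged.
```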
diff --git a/tests/backends/aiida_sqlalchemy/test_session.py b/tests/backends/aiida_sqlalchemy/test_session.py
index 101ee978d8..5d428cfd76 100644
--- a/tests/backends/aiida_sqlalchemy/test_session.py
+++ b/tests/backends/aiida_sqlalchemy/test_session.py
@@ -7,14 +7,12 @@
 # For further information on the license, see the LICENSE.txt file       #
 # For further information please visit http://www.aiida.net              #
 ###########################################################################
-# pylint: disable=import-error,no-name-in-module,no-member
+# pylint: disable=import-error,no-name-in-module,no-member,protected-access
 """Testing Session possible problems."""
 import pytest
 from sqlalchemy.orm import sessionmaker

-import aiida.backends
-from aiida.backends.utils import create_scoped_session_factory
-from aiida.manage.manager import get_manager
+from aiida.backends.sqlalchemy.utils import create_scoped_session_factory


 class TestSessionSqla:
@@ -30,38 +28,31 @@ class TestSessionSqla:
     """

     @pytest.fixture(autouse=True)
-    def init_db(self, aiida_profile, backend):  # pylint: disable=unused-argument
+    def init_db(self, aiida_profile_clean, backend):  # pylint: disable=unused-argument
         """Initialize the database."""
         # pylint: disable=attribute-defined-outside-init
-        aiida_profile.reset_db(with_user=False)
         self.backend = backend
-        self.test_profile = aiida_profile

-    @staticmethod
-    def set_connection(expire_on_commit=True):
+    def set_connection(self, expire_on_commit=True):
         """Set connection to a database."""
-        aiida.backends.sqlalchemy.get_scoped_session().expunge_all()
-        aiida.backends.sqlalchemy.SESSION_FACTORY = create_scoped_session_factory(
-            aiida.backends.sqlalchemy.ENGINE, expire_on_commit=expire_on_commit
+        self.backend.get_session().expunge_all()
+        self.backend._session_factory = create_scoped_session_factory(
+            self.backend._session_factory.bind, expire_on_commit=expire_on_commit
         )

-    @staticmethod
-    def drop_connection():
+    def drop_connection(self):
         """Drop connection to a database."""
-        session = aiida.backends.sqlalchemy.get_scoped_session()
-        session.expunge_all()
-        session.close()
-        aiida.backends.sqlalchemy.SESSION_FACTORY = None
+        self.backend.close()
+        self.backend._initialise_session()

     def test_session_update_and_expiration_1(self):
         """expire_on_commit=True & adding manually and committing computer and code objects."""
         self.set_connection(expire_on_commit=True)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = self.backend.get_session()

-        email = get_manager().get_profile().default_user_email
-        user = self.backend.users.create(email=email)
+        user = self.backend.users.create(email='other@example.com')

         session.add(user.dbmodel)
         session.commit()
@@ -83,10 +74,9 @@ def test_session_update_and_expiration_2(self):
         their built-in store function."""
         self.set_connection(expire_on_commit=True)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = self.backend.get_session()

-        email = get_manager().get_profile().default_user_email
-        user = self.backend.users.create(email=email)
+        user = self.backend.users.create(email='other@example.com')

         session.add(user.dbmodel)
         session.commit()
@@ -105,10 +95,9 @@ def test_session_update_and_expiration_3(self):
         """
         self.set_connection(expire_on_commit=False)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = self.backend.get_session()

-        email = get_manager().get_profile().default_user_email
-        user = self.backend.users.create(email=email)
+        user = self.backend.users.create(email='other@example.com')

         session.add(user.dbmodel)
         session.commit()
@@ -131,10 +120,9 @@ def test_session_update_and_expiration_4(self):

         self.set_connection(expire_on_commit=False)

-        session = aiida.backends.sqlalchemy.get_scoped_session()
+        session = self.backend.get_session()

-        email = get_manager().get_profile().default_user_email
-        user = self.backend.users.create(email=email)
+        user = self.backend.users.create(email='other@example.com')

         session.add(user.dbmodel)
         session.commit()
@@ -152,17 +140,18 @@ def test_node_access_with_sessions(self):
         or the daemon) are immediately reflected on the AiiDA node when read directly e.g. a change to
         node.description will immediately be seen.

-        Tests for bug #1372"""
+        Tests for bug #1372
+        """
         import aiida.backends.sqlalchemy as sa
         from aiida.common import timezone

-        session = sessionmaker(bind=sa.ENGINE, future=True)
+        session = sessionmaker(bind=self.backend.get_session().bind, future=True)
         custom_session = session()

         try:
             user = self.backend.users.create(email='test@localhost').store()
             node = self.backend.nodes.create(node_type='', user=user).store()
-            master_session = node.dbmodel.session
+            master_session = node._dbmodel.session  # pylint: disable=protected-access
             assert master_session is not custom_session

             # Manually load the DbNode in a different session
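The test_session_update_and_expiration_* cases above all hinge on SQLAlchemy's expire_on_commit flag. The snippet below is a self-contained illustration of that flag, independent of AiiDA and using an in-memory SQLite database; the model and e-mail value are placeholders.

```python
# Standalone demonstration of expire_on_commit: with True, attribute access
# after commit() triggers a refresh from the database; with False, the
# committed values stay cached on the instance.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    email = Column(String)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

for expire in (True, False):
    session = sessionmaker(bind=engine, expire_on_commit=expire)()
    user = User(email='other@example.com')
    session.add(user)
    session.commit()
    # With expire=True this access emits a new SELECT; with expire=False it
    # reads the value still held in memory on the instance.
    print(expire, user.email)
    session.close()
```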
- Tests for bug #1372""" + Tests for bug #1372 + """ import aiida.backends.sqlalchemy as sa from aiida.common import timezone - session = sessionmaker(bind=sa.ENGINE, future=True) + session = sessionmaker(bind=self.backend.get_session().bind, future=True) custom_session = session() try: user = self.backend.users.create(email='test@localhost').store() node = self.backend.nodes.create(node_type='', user=user).store() - master_session = node.dbmodel.session + master_session = node._dbmodel.session # pylint: disable=protected-access assert master_session is not custom_session # Manually load the DbNode in a different session diff --git a/tests/backends/managers/__init__.py b/tests/backends/managers/__init__.py deleted file mode 100644 index 2776a55f97..0000000000 --- a/tests/backends/managers/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### diff --git a/tests/backends/test_control.py b/tests/backends/test_control.py index cc111b8266..b905008292 100644 --- a/tests/backends/test_control.py +++ b/tests/backends/test_control.py @@ -10,7 +10,7 @@ """Tests for the :mod:`aiida.backends.control` module.""" import pytest -from aiida.manage.manager import get_manager +from aiida.manage import get_manager class MockRepositoryBackend(): @@ -18,11 +18,11 @@ class MockRepositoryBackend(): # pylint: disable=no-self-use - def get_info(self, *args, **kwargs): + def get_info(self, *args, **kwargs): # pylint: disable=unused-argument """Method to return information.""" return 'this is information about the repo' - def delete_objects(self, *args, **kwargs): + def delete_objects(self, *args, **kwargs): # pylint: disable=unused-argument """Method to delete objects.""" def maintain(self, live=True, dry_run=False, **kwargs): @@ -42,9 +42,9 @@ def maintain(self, live=True, dry_run=False, **kwargs): @pytest.fixture(scope='function') -def clear_storage_before_test(clear_database_before_test): # pylint: disable=unused-argument +def clear_storage_before_test(aiida_profile_clean): # pylint: disable=unused-argument """Clears the storage before a test.""" - repository = get_manager().get_backend().get_repository() + repository = get_manager().get_profile_storage().get_repository() object_keys = list(repository.list_objects()) repository.delete_objects(object_keys) repository.maintain(live=False) @@ -77,7 +77,7 @@ def test_get_unreferenced_keyset(): datanode.put_object_from_filelike(BytesIO(b'File content'), 'file.txt') datanode.store() - aiida_backend = get_manager().get_backend() + aiida_backend = get_manager().get_profile_storage() keys = list(orm.Node.objects(aiida_backend).iter_repo_keys()) repository_backend = aiida_backend.get_repository() @@ -121,7 +121,7 @@ def mock_maintain(self, live=True, dry_run=False, **kwargs): # pylint: disable= logmsg += f' > {key}: {val}\n' logging.info(logmsg) - RepoBackendClass = get_manager().get_backend().get_repository().__class__ # pylint: disable=invalid-name + RepoBackendClass = get_manager().get_profile_storage().get_repository().__class__ # pylint: disable=invalid-name monkeypatch.setattr(RepoBackendClass, 
'maintain', mock_maintain) with caplog.at_level(logging.INFO): @@ -142,7 +142,7 @@ def mock_get_info(self, statistics=False, **kwargs): # pylint: disable=unused-a output['extra_value'] = 0 return output - RepoBackendClass = get_manager().get_backend().get_repository().__class__ # pylint: disable=invalid-name + RepoBackendClass = get_manager().get_profile_storage().get_repository().__class__ # pylint: disable=invalid-name monkeypatch.setattr(RepoBackendClass, 'get_info', mock_get_info) repository_info_out = get_repository_info() diff --git a/tests/backends/test_schema_parity.py b/tests/backends/test_schema_parity.py deleted file mode 100644 index 17ddf964a9..0000000000 --- a/tests/backends/test_schema_parity.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -"""Check the schema parity between Django and SQLAlchemy.""" - - -def test_columns(backend, data_regression): - """Test parity of table columns.""" - data = {} - for tbl_name, col_name, data_type, is_nullable, column_default, char_max_length in get_table_fields(backend): - data.setdefault(tbl_name, {})[col_name] = { - 'data_type': data_type, - 'is_nullable': is_nullable, - 'default': column_default, - } - if char_max_length: - data[tbl_name][col_name]['max_length'] = char_max_length - data_regression.check(data) - - -def test_primary_keys(backend, data_regression): - """Test parity of primary key constraints.""" - data = {} - for tbl_name, name, col_names in sorted(get_constraints(backend, 'p')): - data.setdefault(tbl_name, {})[name] = col_names - data_regression.check(data) - - -def test_unique_constraints(backend, data_regression): - """Test parity of unique constraints.""" - data = {} - for tbl_name, name, col_names in sorted(get_constraints(backend, 'u')): - data.setdefault(tbl_name, {})[name] = sorted(col_names) - data_regression.check(data) - - -def test_indexes(backend, data_regression): - """Test parity of indexes.""" - data = {} - for tbl_name, name, definition in sorted(get_indexes(backend)): - data.setdefault(tbl_name, {})[name] = definition - data_regression.check(data) - - -def get_table_fields(backend): - """Get the fields of all AiiDA tables.""" - # see https://www.postgresql.org/docs/9.1/infoschema-columns.html - rows = backend.execute_raw( - 'SELECT table_name,column_name,data_type,is_nullable,column_default,character_maximum_length ' - 'FROM information_schema.columns ' - "WHERE table_schema = 'public' AND table_name LIKE 'db_%';" - ) - rows = [list(row) for row in rows] - for row in rows: - row[3] = row[3].upper() == 'YES' - return rows - - -def get_constraints(backend, ctype): - """Get the constraints of all AiiDA tables, for a particular constraint type.""" - # see https://www.postgresql.org/docs/9.1/catalog-pg-constraint.html - rows = backend.execute_raw( - 'SELECT tbl.relname,c.conname,ARRAY_AGG(a.attname) FROM pg_constraint AS c ' - 'INNER JOIN pg_class AS tbl ON tbl.oid = c.conrelid ' - 'INNER JOIN pg_attribute AS a ON a.attrelid = c.conrelid AND a.attnum = ANY(c.conkey) ' - f"WHERE c.contype='{ctype}' AND tbl.relname LIKE 'db_%' " - 'GROUP BY 
tbl.relname,c.conname;' - ) - rows = [list(row) for row in rows] - return rows - - -def get_indexes(backend): - """Get the indexes of all AiiDA tables.""" - # see https://www.postgresql.org/docs/9.1/view-pg-indexes.html - rows = backend.execute_raw( - 'SELECT tablename,indexname,indexdef FROM pg_indexes ' - "WHERE tablename LIKE 'db_%' " - 'ORDER BY tablename,indexname;' - ) - rows = [list(row) for row in rows] - return rows diff --git a/tests/backends/test_schema_parity/test_columns.yml b/tests/backends/test_schema_parity/test_columns.yml deleted file mode 100644 index 836cc8dad2..0000000000 --- a/tests/backends/test_schema_parity/test_columns.yml +++ /dev/null @@ -1,300 +0,0 @@ -db_dbauthinfo: - aiidauser_id: - data_type: integer - default: null - is_nullable: false - auth_params: - data_type: jsonb - default: null - is_nullable: false - dbcomputer_id: - data_type: integer - default: null - is_nullable: false - enabled: - data_type: boolean - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dbauthinfo_id_seq'::regclass) - is_nullable: false - metadata: - data_type: jsonb - default: null - is_nullable: false -db_dbcomment: - content: - data_type: text - default: null - is_nullable: false - ctime: - data_type: timestamp with time zone - default: null - is_nullable: false - dbnode_id: - data_type: integer - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dbcomment_id_seq'::regclass) - is_nullable: false - mtime: - data_type: timestamp with time zone - default: null - is_nullable: false - user_id: - data_type: integer - default: null - is_nullable: false - uuid: - data_type: uuid - default: null - is_nullable: false -db_dbcomputer: - description: - data_type: text - default: null - is_nullable: false - hostname: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - id: - data_type: integer - default: nextval('db_dbcomputer_id_seq'::regclass) - is_nullable: false - label: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - metadata: - data_type: jsonb - default: null - is_nullable: false - scheduler_type: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - transport_type: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - uuid: - data_type: uuid - default: null - is_nullable: false -db_dbgroup: - description: - data_type: text - default: null - is_nullable: false - extras: - data_type: jsonb - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dbgroup_id_seq'::regclass) - is_nullable: false - label: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - time: - data_type: timestamp with time zone - default: null - is_nullable: false - type_string: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - user_id: - data_type: integer - default: null - is_nullable: false - uuid: - data_type: uuid - default: null - is_nullable: false -db_dbgroup_dbnodes: - dbgroup_id: - data_type: integer - default: null - is_nullable: false - dbnode_id: - data_type: integer - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dbgroup_dbnodes_id_seq'::regclass) - is_nullable: false -db_dblink: - id: - data_type: integer - default: nextval('db_dblink_id_seq'::regclass) - is_nullable: false - input_id: - data_type: integer - default: null - is_nullable: false - label: - 
data_type: character varying - default: null - is_nullable: false - max_length: 255 - output_id: - data_type: integer - default: null - is_nullable: false - type: - data_type: character varying - default: null - is_nullable: false - max_length: 255 -db_dblog: - dbnode_id: - data_type: integer - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dblog_id_seq'::regclass) - is_nullable: false - levelname: - data_type: character varying - default: null - is_nullable: false - max_length: 50 - loggername: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - message: - data_type: text - default: null - is_nullable: false - metadata: - data_type: jsonb - default: null - is_nullable: false - time: - data_type: timestamp with time zone - default: null - is_nullable: false - uuid: - data_type: uuid - default: null - is_nullable: false -db_dbnode: - attributes: - data_type: jsonb - default: null - is_nullable: true - ctime: - data_type: timestamp with time zone - default: null - is_nullable: false - dbcomputer_id: - data_type: integer - default: null - is_nullable: true - description: - data_type: text - default: null - is_nullable: false - extras: - data_type: jsonb - default: null - is_nullable: true - id: - data_type: integer - default: nextval('db_dbnode_id_seq'::regclass) - is_nullable: false - label: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - mtime: - data_type: timestamp with time zone - default: null - is_nullable: false - node_type: - data_type: character varying - default: null - is_nullable: false - max_length: 255 - process_type: - data_type: character varying - default: null - is_nullable: true - max_length: 255 - repository_metadata: - data_type: jsonb - default: null - is_nullable: true - user_id: - data_type: integer - default: null - is_nullable: false - uuid: - data_type: uuid - default: null - is_nullable: false -db_dbsetting: - description: - data_type: text - default: null - is_nullable: false - id: - data_type: integer - default: nextval('db_dbsetting_id_seq'::regclass) - is_nullable: false - key: - data_type: character varying - default: null - is_nullable: false - max_length: 1024 - time: - data_type: timestamp with time zone - default: null - is_nullable: false - val: - data_type: jsonb - default: null - is_nullable: true -db_dbuser: - email: - data_type: character varying - default: null - is_nullable: false - max_length: 254 - first_name: - data_type: character varying - default: null - is_nullable: false - max_length: 254 - id: - data_type: integer - default: nextval('db_dbuser_id_seq'::regclass) - is_nullable: false - institution: - data_type: character varying - default: null - is_nullable: false - max_length: 254 - last_name: - data_type: character varying - default: null - is_nullable: false - max_length: 254 diff --git a/tests/backends/test_schema_parity/test_indexes.yml b/tests/backends/test_schema_parity/test_indexes.yml deleted file mode 100644 index afd288ce8b..0000000000 --- a/tests/backends/test_schema_parity/test_indexes.yml +++ /dev/null @@ -1,122 +0,0 @@ -db_dbauthinfo: - db_dbauthinfo_aiidauser_id_0684fdfb: CREATE INDEX db_dbauthinfo_aiidauser_id_0684fdfb - ON public.db_dbauthinfo USING btree (aiidauser_id) - db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: CREATE UNIQUE INDEX db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq - ON public.db_dbauthinfo USING btree (aiidauser_id, dbcomputer_id) - db_dbauthinfo_dbcomputer_id_424f7ac4: 
CREATE INDEX db_dbauthinfo_dbcomputer_id_424f7ac4 - ON public.db_dbauthinfo USING btree (dbcomputer_id) - db_dbauthinfo_pkey: CREATE UNIQUE INDEX db_dbauthinfo_pkey ON public.db_dbauthinfo - USING btree (id) -db_dbcomment: - db_dbcomment_dbnode_id_3b812b6b: CREATE INDEX db_dbcomment_dbnode_id_3b812b6b ON - public.db_dbcomment USING btree (dbnode_id) - db_dbcomment_pkey: CREATE UNIQUE INDEX db_dbcomment_pkey ON public.db_dbcomment - USING btree (id) - db_dbcomment_user_id_8ed5e360: CREATE INDEX db_dbcomment_user_id_8ed5e360 ON public.db_dbcomment - USING btree (user_id) - db_dbcomment_uuid_49bac08c_uniq: CREATE UNIQUE INDEX db_dbcomment_uuid_49bac08c_uniq - ON public.db_dbcomment USING btree (uuid) -db_dbcomputer: - db_dbcomputer_label_bc480bab_like: CREATE INDEX db_dbcomputer_label_bc480bab_like - ON public.db_dbcomputer USING btree (label varchar_pattern_ops) - db_dbcomputer_label_bc480bab_uniq: CREATE UNIQUE INDEX db_dbcomputer_label_bc480bab_uniq - ON public.db_dbcomputer USING btree (label) - db_dbcomputer_pkey: CREATE UNIQUE INDEX db_dbcomputer_pkey ON public.db_dbcomputer - USING btree (id) - db_dbcomputer_uuid_f35defa6_uniq: CREATE UNIQUE INDEX db_dbcomputer_uuid_f35defa6_uniq - ON public.db_dbcomputer USING btree (uuid) -db_dbgroup: - db_dbgroup_name_66c75272: CREATE INDEX db_dbgroup_name_66c75272 ON public.db_dbgroup - USING btree (label) - db_dbgroup_name_66c75272_like: CREATE INDEX db_dbgroup_name_66c75272_like ON public.db_dbgroup - USING btree (label varchar_pattern_ops) - db_dbgroup_name_type_12656f33_uniq: CREATE UNIQUE INDEX db_dbgroup_name_type_12656f33_uniq - ON public.db_dbgroup USING btree (label, type_string) - db_dbgroup_pkey: CREATE UNIQUE INDEX db_dbgroup_pkey ON public.db_dbgroup USING - btree (id) - db_dbgroup_type_23b2a748: CREATE INDEX db_dbgroup_type_23b2a748 ON public.db_dbgroup - USING btree (type_string) - db_dbgroup_type_23b2a748_like: CREATE INDEX db_dbgroup_type_23b2a748_like ON public.db_dbgroup - USING btree (type_string varchar_pattern_ops) - db_dbgroup_user_id_100f8a51: CREATE INDEX db_dbgroup_user_id_100f8a51 ON public.db_dbgroup - USING btree (user_id) - db_dbgroup_uuid_af896177_uniq: CREATE UNIQUE INDEX db_dbgroup_uuid_af896177_uniq - ON public.db_dbgroup USING btree (uuid) -db_dbgroup_dbnodes: - db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d: CREATE INDEX db_dbgroup_dbnodes_dbgroup_id_9d3a0f9d - ON public.db_dbgroup_dbnodes USING btree (dbgroup_id) - db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: CREATE UNIQUE INDEX db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq - ON public.db_dbgroup_dbnodes USING btree (dbgroup_id, dbnode_id) - db_dbgroup_dbnodes_dbnode_id_118b9439: CREATE INDEX db_dbgroup_dbnodes_dbnode_id_118b9439 - ON public.db_dbgroup_dbnodes USING btree (dbnode_id) - db_dbgroup_dbnodes_pkey: CREATE UNIQUE INDEX db_dbgroup_dbnodes_pkey ON public.db_dbgroup_dbnodes - USING btree (id) -db_dblink: - db_dblink_input_id_9245bd73: CREATE INDEX db_dblink_input_id_9245bd73 ON public.db_dblink - USING btree (input_id) - db_dblink_label_f1343cfb: CREATE INDEX db_dblink_label_f1343cfb ON public.db_dblink - USING btree (label) - db_dblink_label_f1343cfb_like: CREATE INDEX db_dblink_label_f1343cfb_like ON public.db_dblink - USING btree (label varchar_pattern_ops) - db_dblink_output_id_c0167528: CREATE INDEX db_dblink_output_id_c0167528 ON public.db_dblink - USING btree (output_id) - db_dblink_pkey: CREATE UNIQUE INDEX db_dblink_pkey ON public.db_dblink USING btree - (id) - db_dblink_type_229f212b: CREATE INDEX db_dblink_type_229f212b ON 
public.db_dblink - USING btree (type) - db_dblink_type_229f212b_like: CREATE INDEX db_dblink_type_229f212b_like ON public.db_dblink - USING btree (type varchar_pattern_ops) -db_dblog: - db_dblog_dbnode_id_da34b732: CREATE INDEX db_dblog_dbnode_id_da34b732 ON public.db_dblog - USING btree (dbnode_id) - db_dblog_levelname_ad5dc346: CREATE INDEX db_dblog_levelname_ad5dc346 ON public.db_dblog - USING btree (levelname) - db_dblog_levelname_ad5dc346_like: CREATE INDEX db_dblog_levelname_ad5dc346_like - ON public.db_dblog USING btree (levelname varchar_pattern_ops) - db_dblog_loggername_00b5ba16: CREATE INDEX db_dblog_loggername_00b5ba16 ON public.db_dblog - USING btree (loggername) - db_dblog_loggername_00b5ba16_like: CREATE INDEX db_dblog_loggername_00b5ba16_like - ON public.db_dblog USING btree (loggername varchar_pattern_ops) - db_dblog_pkey: CREATE UNIQUE INDEX db_dblog_pkey ON public.db_dblog USING btree - (id) - db_dblog_uuid_9cf77df3_uniq: CREATE UNIQUE INDEX db_dblog_uuid_9cf77df3_uniq ON - public.db_dblog USING btree (uuid) -db_dbnode: - db_dbnode_ctime_71626ef5: CREATE INDEX db_dbnode_ctime_71626ef5 ON public.db_dbnode - USING btree (ctime) - db_dbnode_dbcomputer_id_315372a3: CREATE INDEX db_dbnode_dbcomputer_id_315372a3 - ON public.db_dbnode USING btree (dbcomputer_id) - db_dbnode_label_6469539e: CREATE INDEX db_dbnode_label_6469539e ON public.db_dbnode - USING btree (label) - db_dbnode_label_6469539e_like: CREATE INDEX db_dbnode_label_6469539e_like ON public.db_dbnode - USING btree (label varchar_pattern_ops) - db_dbnode_mtime_0554ea3d: CREATE INDEX db_dbnode_mtime_0554ea3d ON public.db_dbnode - USING btree (mtime) - db_dbnode_pkey: CREATE UNIQUE INDEX db_dbnode_pkey ON public.db_dbnode USING btree - (id) - db_dbnode_process_type_df7298d0: CREATE INDEX db_dbnode_process_type_df7298d0 ON - public.db_dbnode USING btree (process_type) - db_dbnode_process_type_df7298d0_like: CREATE INDEX db_dbnode_process_type_df7298d0_like - ON public.db_dbnode USING btree (process_type varchar_pattern_ops) - db_dbnode_type_a8ce9753: CREATE INDEX db_dbnode_type_a8ce9753 ON public.db_dbnode - USING btree (node_type) - db_dbnode_type_a8ce9753_like: CREATE INDEX db_dbnode_type_a8ce9753_like ON public.db_dbnode - USING btree (node_type varchar_pattern_ops) - db_dbnode_user_id_12e7aeaf: CREATE INDEX db_dbnode_user_id_12e7aeaf ON public.db_dbnode - USING btree (user_id) - db_dbnode_uuid_62e0bf98_uniq: CREATE UNIQUE INDEX db_dbnode_uuid_62e0bf98_uniq ON - public.db_dbnode USING btree (uuid) -db_dbsetting: - db_dbsetting_key_1b84beb4_like: CREATE INDEX db_dbsetting_key_1b84beb4_like ON public.db_dbsetting - USING btree (key varchar_pattern_ops) - db_dbsetting_key_1b84beb4_uniq: CREATE UNIQUE INDEX db_dbsetting_key_1b84beb4_uniq - ON public.db_dbsetting USING btree (key) - db_dbsetting_pkey: CREATE UNIQUE INDEX db_dbsetting_pkey ON public.db_dbsetting - USING btree (id) -db_dbuser: - db_dbuser_email_30150b7e_like: CREATE INDEX db_dbuser_email_30150b7e_like ON public.db_dbuser - USING btree (email varchar_pattern_ops) - db_dbuser_email_30150b7e_uniq: CREATE UNIQUE INDEX db_dbuser_email_30150b7e_uniq - ON public.db_dbuser USING btree (email) - db_dbuser_pkey: CREATE UNIQUE INDEX db_dbuser_pkey ON public.db_dbuser USING btree - (id) diff --git a/tests/backends/test_schema_parity/test_primary_keys.yml b/tests/backends/test_schema_parity/test_primary_keys.yml deleted file mode 100644 index 5b7aa52d60..0000000000 --- a/tests/backends/test_schema_parity/test_primary_keys.yml +++ /dev/null @@ -1,30 +0,0 @@ 
-db_dbauthinfo: - db_dbauthinfo_pkey: - - id -db_dbcomment: - db_dbcomment_pkey: - - id -db_dbcomputer: - db_dbcomputer_pkey: - - id -db_dbgroup: - db_dbgroup_pkey: - - id -db_dbgroup_dbnodes: - db_dbgroup_dbnodes_pkey: - - id -db_dblink: - db_dblink_pkey: - - id -db_dblog: - db_dblog_pkey: - - id -db_dbnode: - db_dbnode_pkey: - - id -db_dbsetting: - db_dbsetting_pkey: - - id -db_dbuser: - db_dbuser_pkey: - - id diff --git a/tests/backends/test_schema_parity/test_unique_constraints.yml b/tests/backends/test_schema_parity/test_unique_constraints.yml deleted file mode 100644 index c4662b8a40..0000000000 --- a/tests/backends/test_schema_parity/test_unique_constraints.yml +++ /dev/null @@ -1,34 +0,0 @@ -db_dbauthinfo: - db_dbauthinfo_aiidauser_id_dbcomputer_id_777cdaa8_uniq: - - aiidauser_id - - dbcomputer_id -db_dbcomment: - db_dbcomment_uuid_49bac08c_uniq: - - uuid -db_dbcomputer: - db_dbcomputer_label_bc480bab_uniq: - - label - db_dbcomputer_uuid_f35defa6_uniq: - - uuid -db_dbgroup: - db_dbgroup_name_type_12656f33_uniq: - - label - - type_string - db_dbgroup_uuid_af896177_uniq: - - uuid -db_dbgroup_dbnodes: - db_dbgroup_dbnodes_dbgroup_id_dbnode_id_eee23cce_uniq: - - dbgroup_id - - dbnode_id -db_dblog: - db_dblog_uuid_9cf77df3_uniq: - - uuid -db_dbnode: - db_dbnode_uuid_62e0bf98_uniq: - - uuid -db_dbsetting: - db_dbsetting_key_1b84beb4_uniq: - - key -db_dbuser: - db_dbuser_email_30150b7e_uniq: - - email diff --git a/tests/benchmark/test_archive.py b/tests/benchmark/test_archive.py index 8a8fdb5aec..63af4a335f 100644 --- a/tests/benchmark/test_archive.py +++ b/tests/benchmark/test_archive.py @@ -69,7 +69,7 @@ def get_export_kwargs(**kwargs): @pytest.mark.parametrize('depth,breadth,num_objects', TREE.values(), ids=TREE.keys()) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group='import-export') def test_export(benchmark, tmp_path, depth, breadth, num_objects): """Benchmark exporting a provenance graph.""" @@ -93,7 +93,7 @@ def _run(): @pytest.mark.benchmark(group='import-export') def test_import(aiida_profile, benchmark, tmp_path, depth, breadth, num_objects): """Benchmark importing a provenance graph.""" - aiida_profile.reset_db() + aiida_profile.clear_profile() root_node = Dict() recursive_provenance(root_node, depth=depth, breadth=breadth, num_objects=num_objects) root_uuid = root_node.uuid @@ -102,7 +102,7 @@ def test_import(aiida_profile, benchmark, tmp_path, depth, breadth, num_objects) create_archive([root_node], **kwargs) def _setup(): - aiida_profile.reset_db() + aiida_profile.clear_profile() def _run(): import_archive(str(out_path)) diff --git a/tests/benchmark/test_engine.py b/tests/benchmark/test_engine.py index 4d2ae35176..bc14dbb361 100644 --- a/tests/benchmark/test_engine.py +++ b/tests/benchmark/test_engine.py @@ -18,7 +18,7 @@ import pytest from aiida.engine import WorkChain, run_get_node, submit, while_ -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import Code, Int from aiida.plugins.factories import CalculationFactory @@ -115,7 +115,7 @@ def run_task(self): @pytest.mark.parametrize('workchain,iterations,outgoing', WORKCHAINS.values(), ids=WORKCHAINS.keys()) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group='engine') def test_workchain_local(benchmark, aiida_localhost, workchain, iterations, outgoing): """Benchmark Workchains, executed in the local runner.""" 
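The clear_database_before_test to aiida_profile_clean rename, and reset_db() to clear_profile(), recur through the whole benchmark suite. A minimal sketch of the new spellings, assuming pytest-benchmark and the AiiDA pytest fixtures are available; the test names are illustrative only:

    import pytest

    from aiida.orm import Dict


    @pytest.mark.usefixtures('aiida_profile_clean')  # wipes the profile up front
    @pytest.mark.benchmark(group='node')
    def test_store_dict(benchmark):
        """Illustrative benchmark: repeatedly store an empty Dict node."""

        def _run():
            return Dict().store()

        node = benchmark(_run)
        assert node.is_stored


    def test_manual_clear(aiida_profile):
        """On-demand wipe inside the test body, as in test_import above."""
        aiida_profile.clear_profile()  # replaces aiida_profile.reset_db()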
@@ -171,7 +171,7 @@ async def _do_submit(): @pytest.mark.parametrize('workchain,iterations,outgoing', WORKCHAINS.values(), ids=WORKCHAINS.keys()) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group='engine') def test_workchain_daemon(benchmark, submit_get_node, aiida_localhost, workchain, iterations, outgoing): """Benchmark Workchains, executed in the via a daemon runner.""" diff --git a/tests/benchmark/test_nodes.py b/tests/benchmark/test_nodes.py index ad0b1e6a85..cc45fcda52 100644 --- a/tests/benchmark/test_nodes.py +++ b/tests/benchmark/test_nodes.py @@ -42,7 +42,7 @@ def get_data_node_and_object(store=True): return (), {'node': data} -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME, min_rounds=100) def test_store_backend(benchmark): """Benchmark for creating and storing a node directly, @@ -59,7 +59,7 @@ def _run(): assert node.is_stored, node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME, min_rounds=100) def test_store(benchmark): """Benchmark for creating and storing a node, @@ -69,7 +69,7 @@ def test_store(benchmark): assert node_dict['node'].is_stored, node_dict -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME, min_rounds=100) def test_store_with_object(benchmark): """Benchmark for creating and storing a node, @@ -79,7 +79,7 @@ def test_store_with_object(benchmark): assert node_dict['node'].is_stored, node_dict -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME) def test_delete_backend(benchmark): """Benchmark for deleting a stored node directly, @@ -96,7 +96,7 @@ def _run(node): load_node(pk) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME) def test_delete(benchmark): """Benchmark for deleting a node, @@ -113,7 +113,7 @@ def _run(node): load_node(pk) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.benchmark(group=GROUP_NAME) def test_delete_with_object(benchmark): """Benchmark for deleting a node, diff --git a/tests/calculations/arithmetic/test_add.py b/tests/calculations/arithmetic/test_add.py index 988345f813..a78cc45000 100644 --- a/tests/calculations/arithmetic/test_add.py +++ b/tests/calculations/arithmetic/test_add.py @@ -16,7 +16,7 @@ @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_add_default(fixture_sandbox, aiida_localhost, generate_calc_job): """Test a default `ArithmeticAddCalculation`.""" entry_point_name = 'core.arithmetic.add' @@ -45,7 +45,7 @@ def test_add_default(fixture_sandbox, aiida_localhost, generate_calc_job): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_add_custom_filenames(fixture_sandbox, aiida_localhost, generate_calc_job): """Test an `ArithmeticAddCalculation` with non-default input and output filenames.""" entry_point_name = 'core.arithmetic.add' diff --git a/tests/calculations/test_templatereplacer.py 
b/tests/calculations/test_templatereplacer.py index fd71351ee5..6ed859d20c 100644 --- a/tests/calculations/test_templatereplacer.py +++ b/tests/calculations/test_templatereplacer.py @@ -17,7 +17,7 @@ @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_base_template(fixture_sandbox, aiida_localhost, generate_calc_job): """Test a base template that emulates the arithmetic add.""" @@ -73,7 +73,7 @@ def test_base_template(fixture_sandbox, aiida_localhost, generate_calc_job): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_file_usage(fixture_sandbox, aiida_localhost, generate_calc_job): """Test a base template that uses two files.""" diff --git a/tests/calculations/test_transfer.py b/tests/calculations/test_transfer.py index a3a23792da..22735382ee 100644 --- a/tests/calculations/test_transfer.py +++ b/tests/calculations/test_transfer.py @@ -17,7 +17,7 @@ @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_transfer(fixture_sandbox, aiida_localhost, generate_calc_job, tmp_path): """Test a default `TransferCalculation`.""" @@ -67,7 +67,7 @@ def test_get_transfer(fixture_sandbox, aiida_localhost, generate_calc_job, tmp_p @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_put_transfer(fixture_sandbox, aiida_localhost, generate_calc_job, tmp_path): """Test a default `TransferCalculation`.""" diff --git a/tests/cmdline/commands/test_archive_create.py b/tests/cmdline/commands/test_archive_create.py index 7885f10429..51a2229ce0 100644 --- a/tests/cmdline/commands/test_archive_create.py +++ b/tests/cmdline/commands/test_archive_create.py @@ -35,7 +35,7 @@ def test_create_force(run_cli_command, tmp_path): run_cli_command(cmd_archive.create, options) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_create_all(run_cli_command, tmp_path): """Test that creating an archive for a set of various ORM entities works with the zip format.""" computer = Computer( @@ -59,7 +59,7 @@ def test_create_all(run_cli_command, tmp_path): assert archive.querybuilder().append(Group, project=['uuid']).all(flat=True) == [group.uuid] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_create_basic(run_cli_command, tmp_path): """Test that creating an archive for a set of various ORM entities works with the zip format.""" computer = Computer( diff --git a/tests/cmdline/commands/test_archive_import.py b/tests/cmdline/commands/test_archive_import.py index dfa913ca74..7ea347e882 100644 --- a/tests/cmdline/commands/test_archive_import.py +++ b/tests/cmdline/commands/test_archive_import.py @@ -22,7 +22,7 @@ class TestVerdiImport: """Tests for `verdi import`.""" @pytest.fixture(autouse=True) - def init_cls(self, clear_database_before_test): # pylint: disable=unused-argument + def init_cls(self, aiida_profile_clean): # pylint: disable=unused-argument """Setup for each method""" # pylint: disable=attribute-defined-outside-init self.cli_runner = CliRunner() diff --git a/tests/cmdline/commands/test_code.py b/tests/cmdline/commands/test_code.py index 3e04c4c8f1..67adee3494 100644 --- a/tests/cmdline/commands/test_code.py +++ b/tests/cmdline/commands/test_code.py @@ 
-43,14 +43,14 @@ def test_help(run_cli_command): run_cli_command(cmd_code.setup_code, ['--help']) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_list_no_codes_error_message(run_cli_command): """Test ``verdi code list`` when no codes exist.""" result = run_cli_command(cmd_code.code_list) assert '# No codes found matching the specified criteria.' in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_list(run_cli_command, code): """Test ``verdi code list``.""" code2 = Code( @@ -68,7 +68,7 @@ def test_code_list(run_cli_command, code): assert '# No codes found matching the specified criteria.' not in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_list_hide(run_cli_command, code): """Test that hidden codes are shown (or not) properly.""" code.hide() @@ -81,41 +81,41 @@ def test_code_list_hide(run_cli_command, code): assert code.full_label in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_hide_one(run_cli_command, code): """Test ``verdi code hide``.""" run_cli_command(cmd_code.hide, [str(code.pk)]) assert code.hidden -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_reveal_one(run_cli_command, code): """Test ``verdi code reveal``.""" run_cli_command(cmd_code.reveal, [str(code.pk)]) assert not code.hidden -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_relabel_code(run_cli_command, code): """Test ``verdi code relabel``.""" run_cli_command(cmd_code.relabel, [str(code.pk), 'new_code']) assert load_code(code.pk).label == 'new_code' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_relabel_code_full(run_cli_command, code): """Test ``verdi code relabel`` passing the full code label.""" run_cli_command(cmd_code.relabel, [str(code.pk), f'new_code@{code.computer.label}']) assert load_code(code.pk).label == 'new_code' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_relabel_code_full_bad(run_cli_command, code): """Test ``verdi code relabel`` with an incorrect full code label.""" run_cli_command(cmd_code.relabel, [str(code.pk), 'new_code@otherstuff'], raises=True) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_delete_one_force(run_cli_command, code): """Test force code deletion.""" run_cli_command(cmd_code.delete, [str(code.pk), '--force']) @@ -124,13 +124,13 @@ def test_code_delete_one_force(run_cli_command, code): load_code('code') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_show(run_cli_command, code): result = run_cli_command(cmd_code.show, [str(code.pk)]) assert str(code.pk) in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_code_duplicate_non_interactive(run_cli_command, code, non_interactive_editor): """Test code duplication non-interactive.""" @@ -144,7 +144,7 @@ def test_code_duplicate_non_interactive(run_cli_command, code, 
non_interactive_e assert code.get_input_plugin_name() == duplicate.get_input_plugin_name() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_noninteractive_remote(run_cli_command, aiida_localhost, non_interactive_editor): """Test non-interactive remote code setup.""" @@ -157,7 +157,7 @@ def test_noninteractive_remote(run_cli_command, aiida_localhost, non_interactive assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_noninteractive_upload(run_cli_command, non_interactive_editor): """Test non-interactive code setup.""" @@ -170,7 +170,7 @@ def test_noninteractive_upload(run_cli_command, non_interactive_editor): assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_interactive_remote(run_cli_command, aiida_localhost, non_interactive_editor): """Test interactive remote code setup.""" @@ -180,7 +180,7 @@ def test_interactive_remote(run_cli_command, aiida_localhost, non_interactive_ed assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_interactive_upload(run_cli_command, non_interactive_editor): """Test interactive code setup.""" @@ -192,7 +192,7 @@ def test_interactive_upload(run_cli_command, non_interactive_editor): assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_mixed(run_cli_command, aiida_localhost, non_interactive_editor): """Test mixed (interactive/from config) code setup.""" @@ -203,7 +203,7 @@ def test_mixed(run_cli_command, aiida_localhost, non_interactive_editor): assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_code_duplicate_interactive(run_cli_command, aiida_local_code_factory, non_interactive_editor): """Test code duplication interactive.""" @@ -218,7 +218,7 @@ def test_code_duplicate_interactive(run_cli_command, aiida_local_code_factory, n assert code.get_append_text() == duplicate.get_append_text() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_code_duplicate_ignore(run_cli_command, aiida_local_code_factory, non_interactive_editor): """Providing "!" to description should lead to empty description. 
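This file and the several that follow consist almost entirely of the same mechanical fixture rename. A downstream plugin suite affected by the rename could bridge it with a conftest.py shim along these lines; the alias is purely hypothetical and not part of this diff:

    import pytest


    @pytest.fixture
    def clear_database_before_test(aiida_profile_clean):  # hypothetical alias
        """Backwards-compatible name delegating to the renamed fixture."""
        yield aiida_profile_clean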
@@ -234,7 +234,7 @@ def test_code_duplicate_ignore(run_cli_command, aiida_local_code_factory, non_in assert duplicate.description == '' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_from_config_local_file(non_interactive_editor, run_cli_command, aiida_localhost): """Test setting up a code from a config file on disk.""" @@ -255,7 +255,7 @@ def test_from_config_local_file(non_interactive_editor, run_cli_command, aiida_l assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_from_config_url(non_interactive_editor, run_cli_command, aiida_localhost, monkeypatch): """Test setting up a code from a config file from URL.""" @@ -281,7 +281,7 @@ def test_from_config_url(non_interactive_editor, run_cli_command, aiida_localhos assert isinstance(load_code(label), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) def test_code_setup_remote_duplicate_full_label_interactive( run_cli_command, aiida_local_code_factory, aiida_localhost, non_interactive_editor @@ -297,7 +297,7 @@ def test_code_setup_remote_duplicate_full_label_interactive( assert isinstance(load_code(label_unique), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('label_first', (True, False)) def test_code_setup_remote_duplicate_full_label_non_interactive( run_cli_command, aiida_local_code_factory, aiida_localhost, label_first @@ -335,7 +335,7 @@ def test_code_setup_local_duplicate_full_label_interactive( assert isinstance(load_code(label_unique), Code) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_setup_local_duplicate_full_label_non_interactive( run_cli_command, aiida_local_code_factory, aiida_localhost ): @@ -355,7 +355,7 @@ def test_code_setup_local_duplicate_full_label_non_interactive( assert f'the code `{label}` already exists.' 
in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_validate_label_uniqueness(monkeypatch, aiida_localhost): """Test the ``validate_label_uniqueness`` validator.""" from aiida import orm @@ -378,7 +378,7 @@ def load_code(*args, **kwargs): validate_label_uniqueness(ctx, None, 'some-code') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_code_test(run_cli_command): """Test the ``verdi code test`` command.""" computer = Computer( diff --git a/tests/cmdline/commands/test_computer.py b/tests/cmdline/commands/test_computer.py index 8ba75ccc54..0aaa043bdb 100644 --- a/tests/cmdline/commands/test_computer.py +++ b/tests/cmdline/commands/test_computer.py @@ -109,13 +109,13 @@ def generate_setup_options_interactive(ordereddict): return options -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_help(run_cli_command): """Test the help of verdi computer setup.""" run_cli_command(computer_setup, ['--help'], catch_exceptions=False) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_reachable(): """Test if the verdi computer setup is reachable.""" import subprocess as sp @@ -123,7 +123,7 @@ def test_reachable(): assert b'Usage:' in output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_mixed(run_cli_command): """ Test verdi computer setup in mixed mode. @@ -170,7 +170,7 @@ def test_mixed(run_cli_command): assert new_computer.get_append_text() == options_dict_full['append-text'] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_noninteractive(run_cli_command, aiida_localhost, non_interactive_editor): """ @@ -201,7 +201,7 @@ def test_noninteractive(run_cli_command, aiida_localhost, non_interactive_editor assert 'already exists' in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_optional_default_mpiprocs(run_cli_command): """ Check that if is ok not to specify mpiprocs-per-machine @@ -216,7 +216,7 @@ def test_noninteractive_optional_default_mpiprocs(run_cli_command): assert new_computer.get_default_mpiprocs_per_machine() is None -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_optional_default_mpiprocs_2(run_cli_command): """ Check that if is the specified value is zero, it means unspecified @@ -231,7 +231,7 @@ def test_noninteractive_optional_default_mpiprocs_2(run_cli_command): assert new_computer.get_default_mpiprocs_per_machine() is None -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_optional_default_mpiprocs_3(run_cli_command): """ Check that it fails for a negative number of mpiprocs @@ -268,7 +268,7 @@ def test_noninteractive_optional_default_memory_invalid(run_cli_command): assert 'Invalid value for def_memory_per_machine, must be a positive int, got: -1' in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_wrong_transport_fail(run_cli_command): """ Check that if fails as expected for 
an unknown transport @@ -280,7 +280,7 @@ def test_noninteractive_wrong_transport_fail(run_cli_command): assert "entry point 'unknown_transport' is not valid" in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_wrong_scheduler_fail(run_cli_command): """ Check that if fails as expected for an unknown transport @@ -292,7 +292,7 @@ def test_noninteractive_wrong_scheduler_fail(run_cli_command): assert "entry point 'unknown_scheduler' is not valid" in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_invalid_shebang_fail(run_cli_command): """ Check that if fails as expected for an unknown transport @@ -304,7 +304,7 @@ def test_noninteractive_invalid_shebang_fail(run_cli_command): assert 'The shebang line should start with' in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_invalid_mpirun_fail(run_cli_command): """ Check that if fails as expected for an unknown transport @@ -318,7 +318,7 @@ def test_noninteractive_invalid_mpirun_fail(run_cli_command): assert "unknown replacement field 'unknown_key'" in str(result.output) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_noninteractive_from_config(run_cli_command): """Test setting up a computer from a config file""" label = 'noninteractive_config' @@ -719,7 +719,7 @@ def test_computer_delete(self): orm.Computer.objects.get(label='computer_for_test_delete') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_computer_duplicate_interactive(run_cli_command, aiida_localhost, non_interactive_editor): """Test 'verdi computer duplicate' in interactive mode.""" @@ -742,7 +742,7 @@ def test_computer_duplicate_interactive(run_cli_command, aiida_localhost, non_in assert new_computer.get_append_text() == computer.get_append_text() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('vim -cwq',), indirect=True) def test_computer_duplicate_non_interactive(run_cli_command, aiida_localhost, non_interactive_editor): """Test if 'verdi computer duplicate' in non-interactive mode.""" @@ -764,9 +764,9 @@ def test_computer_duplicate_non_interactive(run_cli_command, aiida_localhost, no assert new_computer.get_append_text() == computer.get_append_text() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) -def test_interactive(run_cli_command, clear_database_before_test, non_interactive_editor): +def test_interactive(run_cli_command, aiida_profile_clean, non_interactive_editor): """Test verdi computer setup in interactive mode.""" label = 'interactive_computer' @@ -795,7 +795,7 @@ def test_interactive(run_cli_command, clear_database_before_test, non_interactiv assert new_computer.get_append_text() == '' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_computer_test_stderr(run_cli_command, aiida_localhost, monkeypatch): """Test `verdi computer test` where tested command returns 
non-empty stderr.""" from aiida.transports.plugins.local import LocalTransport @@ -813,7 +813,7 @@ def exec_command_wait(self, command, **kwargs): assert stderr in result.output -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_computer_test_stdout(run_cli_command, aiida_localhost, monkeypatch): """Test `verdi computer test` where tested command returns non-empty stdout.""" from aiida.transports.plugins.local import LocalTransport diff --git a/tests/cmdline/commands/test_config.py b/tests/cmdline/commands/test_config.py index 8bd124f2d7..f8f06df0d2 100644 --- a/tests/cmdline/commands/test_config.py +++ b/tests/cmdline/commands/test_config.py @@ -11,6 +11,7 @@ """Tests for `verdi config`.""" import pytest +from aiida import get_profile from aiida.cmdline.commands import cmd_verdi from aiida.manage.configuration import get_config @@ -32,7 +33,7 @@ def test_config_set_option(self, run_cli_command): for option_value in option_values: options = ['config', 'set', option_name, str(option_value)] run_cli_command(cmd_verdi.verdi, options) - assert str(config.get_option(option_name, scope=config.current_profile.name)) == option_value + assert str(config.get_option(option_name, scope=get_profile().name)) == option_value def test_config_append_option(self, run_cli_command): """Test the `verdi config set --append` command when appending an option value.""" @@ -41,18 +42,18 @@ def test_config_append_option(self, run_cli_command): for value in ['x', 'y']: options = ['config', 'set', '--append', option_name, value] run_cli_command(cmd_verdi.verdi, options) - assert config.get_option(option_name, scope=config.current_profile.name) == ['x', 'y'] + assert config.get_option(option_name, scope=get_profile().name) == ['x', 'y'] def test_config_remove_option(self, run_cli_command): """Test the `verdi config set --remove` command when removing an option value.""" config = get_config() option_name = 'caching.disabled_for' - config.set_option(option_name, ['x', 'y'], scope=config.current_profile.name) + config.set_option(option_name, ['x', 'y'], scope=get_profile().name) options = ['config', 'set', '--remove', option_name, 'x'] run_cli_command(cmd_verdi.verdi, options) - assert config.get_option(option_name, scope=config.current_profile.name) == ['y'] + assert config.get_option(option_name, scope=get_profile().name) == ['y'] def test_config_get_option(self, run_cli_command): """Test the `verdi config show` command when getting an option.""" @@ -88,7 +89,6 @@ def test_config_unset_option(self, run_cli_command): def test_config_set_option_global_only(self, run_cli_command): """Test that `global_only` options are only set globally even if the `--global` flag is not set.""" - config = get_config() option_name = 'autofill.user.email' option_value = 'some@email.com' @@ -100,7 +100,7 @@ def test_config_set_option_global_only(self, run_cli_command): # Check that the current profile name is not in the output assert option_value in result.output.strip() - assert config.current_profile.name not in result.output.strip() + assert get_profile().name not in result.output.strip() def test_config_list(self, run_cli_command): """Test `verdi config list`""" @@ -134,7 +134,7 @@ def test_config_caching(self, run_cli_command): assert 'core.arithmetic.add' in result.output.strip() config = get_config() - config.set_option('caching.default_enabled', True, scope=config.current_profile.name) + config.set_option('caching.default_enabled', True, scope=get_profile().name) result = 
run_cli_command(cmd_verdi.verdi, ['config', 'caching']) assert 'core.arithmetic.add' in result.output.strip() diff --git a/tests/cmdline/commands/test_daemon.py b/tests/cmdline/commands/test_daemon.py index 3c251ee1b1..64f024c548 100644 --- a/tests/cmdline/commands/test_daemon.py +++ b/tests/cmdline/commands/test_daemon.py @@ -11,6 +11,7 @@ """Tests for ``verdi daemon``.""" import pytest +from aiida import get_profile from aiida.cmdline.commands import cmd_daemon @@ -62,7 +63,7 @@ def test_daemon_start_number(run_cli_command, daemon_client): def test_daemon_start_number_config(run_cli_command, daemon_client, isolated_config): """Test ``verdi daemon start`` with ``daemon.default_workers`` config option being set.""" number = 3 - isolated_config.set_option('daemon.default_workers', number, scope=isolated_config.current_profile.name) + isolated_config.set_option('daemon.default_workers', number, scope=get_profile().name) isolated_config.store() run_cli_command(cmd_daemon.start) @@ -83,12 +84,12 @@ def test_foreground_multiple_workers(run_cli_command): run_cli_command(cmd_daemon.start, ['--foreground', str(4)], raises=True) -@pytest.mark.usefixtures('daemon_client') -def test_daemon_status(run_cli_command, isolated_config): +@pytest.mark.usefixtures('daemon_client', 'isolated_config') +def test_daemon_status(run_cli_command): """Test ``verdi daemon status``.""" run_cli_command(cmd_daemon.start) result = run_cli_command(cmd_daemon.status) last_line = result.output_lines[-1] - assert f'Profile: {isolated_config.current_profile.name}' in result.output + assert f'Profile: {get_profile().name}' in result.output assert last_line == 'Use verdi daemon [incr | decr] [num] to increase / decrease the amount of workers' diff --git a/tests/cmdline/commands/test_group_ls.py b/tests/cmdline/commands/test_group_ls.py index f74f4379cf..1c3c1e5cc3 100644 --- a/tests/cmdline/commands/test_group_ls.py +++ b/tests/cmdline/commands/test_group_ls.py @@ -19,7 +19,7 @@ @pytest.fixture -def setup_groups(clear_database_before_test): +def setup_groups(aiida_profile_clean): """Setup some groups for testing.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']: group, _ = orm.Group.objects.get_or_create(label) diff --git a/tests/cmdline/commands/test_node.py b/tests/cmdline/commands/test_node.py index 6b0d455c2a..47d9e0e380 100644 --- a/tests/cmdline/commands/test_node.py +++ b/tests/cmdline/commands/test_node.py @@ -608,7 +608,7 @@ def test_rehash_invalid_entry_point(self): ['--force'], ) ) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_node_delete_basics(run_cli_command, options): """ Testing the correct translation for the `--force` and `--verbosity` options. 
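The test_config.py and test_daemon.py hunks above all swap config.current_profile.name for get_profile().name as the option scope. A short sketch of the resulting pattern, assuming a profile is loaded; the option name is one that appears in these tests, and unset_option is assumed to accept the same scope keyword as set_option:

    from aiida import get_profile
    from aiida.manage.configuration import get_config

    config = get_config()
    scope = get_profile().name  # replaces config.current_profile.name

    # Set, read back and remove a profile-scoped option.
    config.set_option('daemon.default_workers', 3, scope=scope)
    assert config.get_option('daemon.default_workers', scope=scope) == 3
    config.unset_option('daemon.default_workers', scope=scope)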
diff --git a/tests/cmdline/commands/test_process.py b/tests/cmdline/commands/test_process.py index 688b296360..666ba51af4 100644 --- a/tests/cmdline/commands/test_process.py +++ b/tests/cmdline/commands/test_process.py @@ -17,6 +17,7 @@ import plumpy import pytest +from aiida import get_profile from aiida.backends.testbase import AiidaTestCase from aiida.cmdline.commands import cmd_process from aiida.common.links import LinkType @@ -291,11 +292,11 @@ def test_report(self): for flag in ['-l', '--levelname']: result = self.cli_runner.invoke(cmd_process.process_report, [str(grandparent.pk), flag, 'WARNING']) self.assertIsNone(result.exception, result.output) - self.assertEqual(len(get_result_lines(result)), 1) + self.assertEqual(len(get_result_lines(result)), 1, get_result_lines(result)) self.assertEqual(get_result_lines(result)[0], 'No log messages recorded for this entry') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_list_worker_slot_warning(run_cli_command, monkeypatch): """ Test that the if the number of used worker process slots exceeds a threshold, @@ -310,7 +311,7 @@ def test_list_worker_slot_warning(run_cli_command, monkeypatch): # Get the number of allowed processes per worker: config = get_config() - worker_process_slots = config.get_option('daemon.worker_process_slots', config.current_profile.name) + worker_process_slots = config.get_option('daemon.worker_process_slots', get_profile().name) limit = int(worker_process_slots * 0.9) # Create additional active nodes such that we have 90% of the active slot limit @@ -388,7 +389,7 @@ def test_multiple_processes(self): @pytest.mark.skip(reason='fails to complete randomly (see issue #4731)') @pytest.mark.requires_rmq -@pytest.mark.usefixtures('with_daemon', 'clear_database_before_test') +@pytest.mark.usefixtures('with_daemon', 'aiida_profile_clean') @pytest.mark.parametrize('cmd_try_all', (True, False)) def test_pause_play_kill(cmd_try_all, run_cli_command): """ @@ -397,7 +398,7 @@ def test_pause_play_kill(cmd_try_all, run_cli_command): # pylint: disable=no-member, too-many-locals from aiida.cmdline.commands.cmd_process import process_kill, process_pause, process_play from aiida.engine import ProcessState - from aiida.manage.manager import get_manager + from aiida.manage import get_manager from aiida.orm import load_node runner = get_manager().create_runner(rmq_submit=True) diff --git a/tests/cmdline/commands/test_profile.py b/tests/cmdline/commands/test_profile.py index ca75fa9128..a05c62f806 100644 --- a/tests/cmdline/commands/test_profile.py +++ b/tests/cmdline/commands/test_profile.py @@ -14,7 +14,7 @@ from aiida.backends.testbase import AiidaPostgresTestCase from aiida.cmdline.commands import cmd_profile, cmd_verdi -from aiida.manage import configuration +from aiida.manage import configuration, get_manager from tests.utils.configuration import create_mock_profile @@ -102,11 +102,12 @@ def test_show(self): self.assertClickSuccess(result) for key, value in profile.dictionary.items(): if isinstance(value, str): - self.assertIn(key.lower(), result.output) + self.assertIn(key, result.output) self.assertIn(value, result.output) def test_show_with_profile_option(self): """Test the `verdi profile show` command in combination with `-p/--profile.""" + get_manager().unload_profile() self.mock_profiles() profile_name_non_default = self.profile_list[1] @@ -140,7 +141,7 @@ def test_delete(self): """Test for verdi profile delete command.""" from aiida.cmdline.commands.cmd_profile 
import profile_delete, profile_list

-        configuration.reset_profile()
+        get_manager().unload_profile()

         kwargs = {'database_port': self.pg_test.dsn['port']}
         self.mock_profiles(**kwargs)
diff --git a/tests/cmdline/commands/test_run.py b/tests/cmdline/commands/test_run.py
index f7af187a9e..6b26747e31 100644
--- a/tests/cmdline/commands/test_run.py
+++ b/tests/cmdline/commands/test_run.py
@@ -63,7 +63,7 @@ def wf():
         self.assertClickResultNoException(result)

         # Try to load the function calculation node from the printed pk in the output
-        pk = int(result.output)
+        pk = int(result.output.splitlines()[-1])
         node = load_node(pk)

         # Verify that the node has the correct function name and content
@@ -77,21 +77,8 @@ class TestAutoGroups(AiidaTestCase):

     def setUp(self):
         """Setup the CLI runner to run command line commands."""
-        from aiida.orm import autogroup
-
         super().setUp()
         self.cli_runner = CliRunner()
-        # I need to disable the global variable of this test environment, because invoke is just calling the function
-        # and therefore inheriting the global variable
-        self._old_autogroup = autogroup.CURRENT_AUTOGROUP
-        autogroup.CURRENT_AUTOGROUP = None
-
-    def tearDown(self):
-        """Setup the CLI runner to run command line commands."""
-        from aiida.orm import autogroup
-
-        super().tearDown()
-        autogroup.CURRENT_AUTOGROUP = self._old_autogroup

     def test_autogroup(self):
         """Check if the autogroup is properly generated."""
diff --git a/tests/cmdline/commands/test_setup.py b/tests/cmdline/commands/test_setup.py
index 91a7bd8b7f..95bb84f395 100644
--- a/tests/cmdline/commands/test_setup.py
+++ b/tests/cmdline/commands/test_setup.py
@@ -8,18 +8,18 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 """Tests for `verdi profile`."""
+import os
+import tempfile
 import traceback

 from click.testing import CliRunner
 import pytest

 from aiida import orm
-from aiida.backends import BACKEND_DJANGO
 from aiida.backends.testbase import AiidaPostgresTestCase
 from aiida.cmdline.commands import cmd_setup
-from aiida.manage import configuration
+from aiida.manage import configuration, get_manager
 from aiida.manage.external.postgres import Postgres
-from aiida.manage.manager import get_manager


 @pytest.mark.usefixtures('config_with_profile')
@@ -28,10 +28,8 @@ class TestVerdiSetup(AiidaPostgresTestCase):

     def setUp(self):
         """Create a CLI runner to invoke the CLI commands."""
-        if configuration.PROFILE.storage_backend == BACKEND_DJANGO:
-            pytest.skip('Reenable when #2813 is addressed')
         super().setUp()
-        self.backend = configuration.PROFILE.storage_backend
+        self.backend = configuration.get_profile().storage_backend
         self.cli_runner = CliRunner()

     def test_help(self):
@@ -45,7 +43,9 @@
     def test_quicksetup(self):
         """Test `verdi quicksetup`."""
-        configuration.reset_profile()
+        get_manager().unload_profile()
+
+        config = configuration.get_config()

         profile_name = 'testing'
         user_email = 'some@email.com'
         user_first_name = 'John'
@@ -78,15 +78,12 @@ def test_quicksetup(self):
         self.assertEqual(user.institution, user_institution)

         # Check that the repository UUID was stored in the database
-        manager = get_manager()
-        backend_manager = manager.get_backend_manager()
-        self.assertEqual(backend_manager.get_repository_uuid(), self.backend.get_repository().uuid)
+        backend = 
profile.storage_cls(profile) + self.assertEqual(backend.get_global_variable('repository|uuid'), backend.get_repository().uuid) def test_quicksetup_from_config_file(self): """Test `verdi quicksetup` from configuration file.""" - import os - import tempfile - + get_manager().unload_profile() with tempfile.NamedTemporaryFile('w') as handle: handle.write( f"""--- @@ -103,7 +106,7 @@ def test_quicksetup_from_config_file(self): def test_quicksetup_wrong_port(self): """Test `verdi quicksetup` exits if port is wrong.""" - configuration.reset_profile() + get_manager().unload_profile() profile_name = 'testing' user_email = 'some@email.com' @@ -129,7 +132,7 @@ def test_setup(self): db_pass = 'aiida_test_setup' postgres.create_dbuser(db_user, db_pass) postgres.create_db(db_user, db_name) - configuration.reset_profile() + get_manager().unload_profile() profile_name = 'testing' user_email = 'some@email.com' @@ -165,6 +168,5 @@ def test_setup(self): self.assertEqual(user.institution, user_institution) # Check that the repository UUID was stored in the database - manager = get_manager() - backend_manager = manager.get_backend_manager() - self.assertEqual(backend_manager.get_repository_uuid(), self.backend.get_repository().uuid) + backend = profile.storage_cls(profile) + self.assertEqual(backend.get_global_variable('repository|uuid'), backend.get_repository().uuid) diff --git a/tests/cmdline/commands/test_status.py b/tests/cmdline/commands/test_status.py index d5a3055e6d..f1ba4868df 100644 --- a/tests/cmdline/commands/test_status.py +++ b/tests/cmdline/commands/test_status.py @@ -10,7 +10,8 @@ """Tests for `verdi status`.""" import pytest -from aiida import __version__ +from aiida import __version__, get_profile +from aiida.backends.sqlalchemy import migrator from aiida.cmdline.commands import cmd_status from aiida.cmdline.utils.echo import ExitCode @@ -51,49 +52,45 @@ def test_status_no_rmq(run_cli_command): assert string in result.output -def test_database_incompatible(run_cli_command, monkeypatch): - """Test `verdi status` when database schema version is incompatible with that of the code.""" - from aiida.manage.manager import get_manager +def test_storage_unable_to_connect(run_cli_command): + """Test `verdi status` when there is an unknown error while connecting to the storage.""" + # pylint: disable=protected-access + profile = get_profile() - def get_backend(): - from aiida.common.exceptions import IncompatibleDatabaseSchema - raise IncompatibleDatabaseSchema() + old_port = profile._attributes['storage']['config']['database_port'] + profile._attributes['storage']['config']['database_port'] = 123 - monkeypatch.setattr(get_manager(), 'get_backend', get_backend) + try: + result = run_cli_command(cmd_status.verdi_status, raises=True) + assert 'Unable to connect to profile\'s storage' in result.output + assert result.exit_code is ExitCode.CRITICAL + finally: + profile._attributes['storage']['config']['database_port'] = old_port - result = run_cli_command(cmd_status.verdi_status, raises=True) - assert 'Database schema' in result.output - assert 'is incompatible with the code.' 
in result.output - assert '`verdi storage migrate`' in result.output - assert result.exit_code is ExitCode.CRITICAL - - -def test_database_unable_to_connect(run_cli_command, monkeypatch): - """Test `verdi status` when there is an unknown error while connecting to the database.""" - from aiida.manage.manager import get_manager - profile = get_manager().get_profile() +def test_storage_incompatible(run_cli_command, monkeypatch): + """Test `verdi status` when storage schema version is incompatible with that of the code.""" - def get_backend(): - raise RuntimeError() + def storage_cls(*args, **kwargs): # pylint: disable=unused-argument + from aiida.common.exceptions import IncompatibleStorageSchema + raise IncompatibleStorageSchema() - monkeypatch.setattr(get_manager(), 'get_backend', get_backend) + monkeypatch.setattr(migrator.PsqlDostoreMigrator, 'validate_storage', storage_cls) result = run_cli_command(cmd_status.verdi_status, raises=True) - assert 'Unable to connect to database' in result.output - assert profile.storage_config['database_name'] in result.output - assert profile.storage_config['database_username'] in result.output - assert profile.storage_config['database_hostname'] in result.output - assert str(profile.storage_config['database_port']) in result.output + assert 'verdi storage migrate' in result.output assert result.exit_code is ExitCode.CRITICAL -@pytest.mark.usefixtures('aiida_profile') -def tests_database_version(run_cli_command, manager): - """Test the ``verdi database version`` command.""" - backend_manager = manager.get_backend_manager() - db_gen = backend_manager.get_schema_generation_database() - db_ver = backend_manager.get_schema_version_backend() +def test_storage_corrupted(run_cli_command, monkeypatch): + """Test `verdi status` when the storage is found to be corrupt (e.g. 
non-matching repository UUIDs).""" - result = run_cli_command(cmd_status.verdi_status) - assert f'{db_gen} / {db_ver}' in result.output + def storage_cls(*args, **kwargs): # pylint: disable=unused-argument + from aiida.common.exceptions import CorruptStorage + raise CorruptStorage() + + monkeypatch.setattr(migrator.PsqlDostoreMigrator, 'validate_storage', storage_cls) + + result = run_cli_command(cmd_status.verdi_status, raises=True) + assert 'Storage is corrupted' in result.output + assert result.exit_code is ExitCode.CRITICAL diff --git a/tests/cmdline/commands/test_storage.py b/tests/cmdline/commands/test_storage.py index e7737b3c18..fb1168b9ce 100644 --- a/tests/cmdline/commands/test_storage.py +++ b/tests/cmdline/commands/test_storage.py @@ -10,11 +10,20 @@ """Tests for `verdi storage`.""" import pytest +from aiida import get_profile from aiida.cmdline.commands import cmd_storage from aiida.common import exceptions -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') +def tests_storage_version(run_cli_command): + """Test the ``verdi storage version`` command.""" + result = run_cli_command(cmd_storage.storage_version) + version = get_profile().storage_cls.version_profile(get_profile()) + assert version in result.output + + +@pytest.mark.usefixtures('aiida_profile_clean') def tests_storage_info(aiida_localhost, run_cli_command): """Test the ``verdi storage info`` command with the ``-statistics`` option.""" from aiida import orm @@ -29,12 +38,12 @@ def tests_storage_info(aiida_localhost, run_cli_command): def tests_storage_migrate_force(run_cli_command): """Test the ``verdi storage migrate`` command (with force option).""" result = run_cli_command(cmd_storage.storage_migrate, options=['--force']) - assert result.output == '' + assert 'Migrating to the head of the main branch' in result.output def tests_storage_migrate_interactive(run_cli_command): """Test the ``verdi storage migrate`` command (with interactive prompt).""" - from aiida.manage.manager import get_manager + from aiida.manage import get_manager profile = get_manager().get_profile() result = run_cli_command(cmd_storage.storage_migrate, user_input='MIGRATE NOW') @@ -68,7 +77,7 @@ def tests_storage_migrate_cancel_prompt(run_cli_command, monkeypatch): @pytest.mark.parametrize('raise_type', [ exceptions.ConfigurationError, - exceptions.DatabaseMigrationError, + exceptions.StorageMigrationError, ]) @pytest.mark.parametrize( 'call_kwargs', [ @@ -91,13 +100,13 @@ def tests_storage_migrate_raises(run_cli_command, raise_type, call_kwargs, monke Instead, the class of the object needs to be patched so that all further created objects will have the modified method. 
""" - from aiida.manage.manager import get_manager + from aiida.manage import get_manager manager = get_manager() def mocked_migrate(self): # pylint: disable=no-self-use raise raise_type('passed error message') - monkeypatch.setattr(manager.get_backend().__class__, 'migrate', mocked_migrate) + monkeypatch.setattr(manager.get_profile_storage().__class__, 'migrate', mocked_migrate) result = run_cli_command(cmd_storage.storage_migrate, **call_kwargs) assert result.exc_info[0] is SystemExit diff --git a/tests/cmdline/params/types/test_code.py b/tests/cmdline/params/types/test_code.py index e9357cf0f2..587328d9a7 100644 --- a/tests/cmdline/params/types/test_code.py +++ b/tests/cmdline/params/types/test_code.py @@ -24,7 +24,7 @@ def parameter_type(): @pytest.fixture -def setup_codes(clear_database_before_test, aiida_localhost): +def setup_codes(aiida_profile_clean, aiida_localhost): """Create some `Code` instances to test the `CodeParamType` parameter type for the command line infrastructure. We create an initial code with a random name and then on purpose create two code with a name that matches exactly diff --git a/tests/cmdline/params/types/test_computer.py b/tests/cmdline/params/types/test_computer.py index b709011804..fd52fa0a91 100644 --- a/tests/cmdline/params/types/test_computer.py +++ b/tests/cmdline/params/types/test_computer.py @@ -23,7 +23,7 @@ def parameter_type(): @pytest.fixture -def setup_computers(clear_database_before_test): # pylint: disable=unused-argument +def setup_computers(aiida_profile_clean): # pylint: disable=unused-argument """Create some `Computer` instances to test the `ComputerParamType` parameter type for the command line infrastructure. diff --git a/tests/cmdline/params/types/test_group.py b/tests/cmdline/params/types/test_group.py index de692c0dfe..5c090194ac 100644 --- a/tests/cmdline/params/types/test_group.py +++ b/tests/cmdline/params/types/test_group.py @@ -24,7 +24,7 @@ def parameter_type(): @pytest.fixture -def setup_groups(clear_database_before_test): +def setup_groups(aiida_profile_clean): """Create some groups to test the `GroupParamType` parameter type for the command line infrastructure. We create an initial group with a random name and then on purpose create two groups with a name that matches exactly diff --git a/tests/cmdline/utils/test_common.py b/tests/cmdline/utils/test_common.py index 0df23c4309..2f524ee0b9 100644 --- a/tests/cmdline/utils/test_common.py +++ b/tests/cmdline/utils/test_common.py @@ -16,7 +16,7 @@ from aiida.orm import CalcFunctionNode, CalculationNode, WorkflowNode -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_node_summary(aiida_local_code_factory): """Test the ``get_node_summary`` utility.""" code = aiida_local_code_factory(entry_point='core.arithmetic.add', executable='/bin/bash') @@ -30,7 +30,7 @@ def test_get_node_summary(aiida_local_code_factory): assert node.computer.label in summary -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_node_info_multiple_call_links(): """Test the ``get_node_info`` utility. 
@@ -52,7 +52,7 @@ def test_get_node_info_multiple_call_links(): assert str(node_two.pk) in node_info -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_process_function_report(): """Test the ``get_process_function_report`` utility.""" warning = 'You have been warned' diff --git a/tests/cmdline/utils/test_repository.py b/tests/cmdline/utils/test_repository.py index 0fae8bf272..d5937325ff 100644 --- a/tests/cmdline/utils/test_repository.py +++ b/tests/cmdline/utils/test_repository.py @@ -34,14 +34,14 @@ def folder_data(): return node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_list_repository_contents(capsys, folder_data): """Test the `list_repository_contents` method.""" list_repository_contents(folder_data, path='', color=True) assert capsys.readouterr().out == 'file.txt\nnested\n' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_list_repository_contents_color(runner, folder_data): """Test the `list_repository_contents` method. diff --git a/tests/conftest.py b/tests/conftest.py index c8d7afe827..ea08471917 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,6 +16,7 @@ import click import pytest +from aiida import get_profile from aiida.manage.configuration import Config, Profile, get_config, load_profile pytest_plugins = ['aiida.manage.tests.pytest_fixtures', 'sphinx.testing.fixtures'] # pylint: disable=invalid-name @@ -83,7 +84,7 @@ def generate_calc_job(): def _generate_calc_job(folder, entry_point_name, inputs=None, return_process=False): """Fixture to generate a mock `CalcInfo` for testing calculation jobs.""" from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager from aiida.plugins import CalculationFactory inputs = inputs or {} @@ -113,7 +114,7 @@ def _generate_work_chain(entry_point, inputs=None): :return: a `WorkChain` instance. """ from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager from aiida.plugins import WorkflowFactory inputs = inputs or {} @@ -172,7 +173,7 @@ def isolated_config(monkeypatch): current_config = configuration.CONFIG configuration.CONFIG = copy.deepcopy(current_config) - configuration.CONFIG.set_default_profile(configuration.PROFILE.name, overwrite=True) + configuration.CONFIG.set_default_profile(configuration.get_profile().name, overwrite=True) try: yield configuration.CONFIG @@ -191,15 +192,17 @@ def empty_config(tmp_path) -> Config: :return: a new empty config instance. 
""" from aiida.common.utils import Capturing - from aiida.manage import configuration - from aiida.manage.configuration import reset_profile, settings + from aiida.manage import configuration, get_manager + from aiida.manage.configuration import settings + + manager = get_manager() # Store the current configuration instance and config directory path current_config = configuration.CONFIG current_config_path = current_config.dirpath - current_profile_name = configuration.PROFILE.name + current_profile_name = configuration.get_profile().name - reset_profile() + manager.unload_profile() configuration.CONFIG = None # Create a temporary folder, set it as the current config directory path and reset the loaded configuration @@ -217,10 +220,10 @@ def empty_config(tmp_path) -> Config: finally: # Reset the config folder path and the config instance. Note this will always be executed after the yield no # matter what happened in the test that used this fixture. - reset_profile() + manager.unload_profile() settings.AIIDA_CONFIG_FOLDER = current_config_path configuration.CONFIG = current_config - load_profile(current_profile_name) + manager.load_profile(current_profile_name) @pytest.fixture @@ -237,7 +240,7 @@ def _create_profile(name='test-profile', **kwargs): profile_dictionary = { 'default_user_email': kwargs.pop('default_user_email', 'dummy@localhost'), 'storage': { - 'backend': kwargs.pop('storage_backend', 'django'), + 'backend': kwargs.pop('storage_backend', 'psql_dos'), 'config': { 'database_engine': kwargs.pop('database_engine', 'postgresql_psycopg2'), 'database_hostname': kwargs.pop('database_hostname', 'localhost'), @@ -278,8 +281,8 @@ def config_with_profile_factory(empty_config, profile_factory) -> Config: Example:: def test_config_with_profile(config_with_profile_factory): - config = config_with_profile_factory(set_as_default=True, name='default', storage_backend='django') - assert config.current_profile.name == 'default' + config = config_with_profile_factory(name='default', set_as_default=True, load=True) + assert get_profile().name == 'default' As with `empty_config`, the currently loaded configuration and profile are stored in memory, and are automatically restored at the end of this context manager. 
@@ -324,7 +327,7 @@ def config_with_profile(config_with_profile_factory): @pytest.fixture def manager(aiida_profile): # pylint: disable=unused-argument """Get the ``Manager`` instance of the currently loaded profile.""" - from aiida.manage.manager import get_manager + from aiida.manage import get_manager return get_manager() @@ -339,8 +342,8 @@ def event_loop(manager): @pytest.fixture def backend(manager): - """Get the ``Backend`` instance of the currently loaded profile.""" - return manager.get_backend() + """Get the ``Backend`` storage instance of the currently loaded profile.""" + return manager.get_profile_storage() @pytest.fixture @@ -349,22 +352,6 @@ def communicator(manager): return manager.get_communicator() -@pytest.fixture -def skip_if_not_django(backend): - """Fixture that will skip any test that uses it when a profile is loaded with any other backend then Django.""" - from aiida.orm.implementation.django.backend import DjangoBackend - if not isinstance(backend, DjangoBackend): - pytest.skip('this test should only be run for the Django backend.') - - -@pytest.fixture -def skip_if_not_sqlalchemy(backend): - """Fixture that will skip any test that uses it when a profile is loaded with any other backend then SqlAlchemy.""" - from aiida.orm.implementation.sqlalchemy.backend import SqlaBackend - if not isinstance(backend, SqlaBackend): - pytest.skip('this test should only be run for the SqlAlchemy backend.') - - @pytest.fixture(scope='function') def override_logging(isolated_config): """Temporarily override the log level for the AiiDA logger and the database log handler to ``DEBUG``. @@ -401,7 +388,7 @@ def with_daemon(): env = get_env_with_venv_bin() env['PYTHONPATH'] = ':'.join(sys.path) - profile = get_config().current_profile + profile = get_profile() daemon = subprocess.Popen( # pylint: disable=consider-using-with DaemonClient(profile).cmd_string.split(), stderr=sys.stderr, @@ -419,7 +406,6 @@ def with_daemon(): def daemon_client(): """Return a daemon client instance and stop any daemon instances running for the test profile after the test.""" from aiida.engine.daemon.client import DaemonClient - from aiida.manage.configuration import get_profile client = DaemonClient(get_profile()) @@ -470,7 +456,6 @@ def _run_cli_command( from aiida.cmdline.commands.cmd_verdi import VerdiCommandGroup from aiida.common import AttributeDict - from aiida.manage.configuration import get_profile config = get_config() profile = get_profile() @@ -514,4 +499,4 @@ def reset_log_level(): yield finally: log.CLI_LOG_LEVEL = None - log.configure_logging() + log.configure_logging(with_orm=True) diff --git a/tests/engine/daemon/test_execmanager.py b/tests/engine/daemon/test_execmanager.py index 3edca1c71d..c90dfebea9 100644 --- a/tests/engine/daemon/test_execmanager.py +++ b/tests/engine/daemon/test_execmanager.py @@ -85,7 +85,7 @@ def test_hierarchy_utility(file_hierarchy, tmp_path): # yapf: disable -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('retrieve_list, expected_hierarchy', ( # Single file or folder, either toplevel or nested (['file_a.txt'], {'file_a.txt': 'file_a'}), @@ -130,7 +130,7 @@ def test_retrieve_files_from_list( assert serialize_file_hierarchy(target) == expected_hierarchy -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_upload_local_copy_list(fixture_sandbox, aiida_localhost, aiida_local_code_factory, file_hierarchy, tmp_path): """Test the 
``local_copy_list`` functionality in ``upload_calculation``. diff --git a/tests/engine/processes/calcjobs/test_calc_job.py b/tests/engine/processes/calcjobs/test_calc_job.py index 26fed5984f..806cb52238 100644 --- a/tests/engine/processes/calcjobs/test_calc_job.py +++ b/tests/engine/processes/calcjobs/test_calc_job.py @@ -109,7 +109,7 @@ def prepare_for_submission(self, folder): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'chdir_tmp_path') +@pytest.mark.usefixtures('aiida_profile_clean', 'chdir_tmp_path') @pytest.mark.parametrize('parallel_run', [True, False]) def test_multi_codes_run_parallel(aiida_local_code_factory, file_regression, parallel_run): """test codes_run_mode set in CalcJob""" @@ -138,7 +138,7 @@ def test_multi_codes_run_parallel(aiida_local_code_factory, file_regression, par @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'chdir_tmp_path') +@pytest.mark.usefixtures('aiida_profile_clean', 'chdir_tmp_path') @pytest.mark.parametrize('calcjob_withmpi', [True, False]) def test_multi_codes_run_withmpi(aiida_local_code_factory, file_regression, calcjob_withmpi): """test withmpi set in CalcJob only take effect for codes which have codeinfo.withmpi not set""" @@ -182,7 +182,7 @@ def setUpClass(cls, *args, **kwargs): def instantiate_process(self, state=CalcJobState.PARSING): """Instantiate a process with default inputs and return the `Process` instance.""" from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager inputs = deepcopy(self.inputs) inputs['code'] = self.remote_code @@ -486,7 +486,7 @@ def test_get_importer(self): def generate_process(aiida_local_code_factory): """Instantiate a process with default inputs and return the `Process` instance.""" from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager def _generate_process(inputs=None): @@ -515,7 +515,7 @@ def _generate_process(inputs=None): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'override_logging') +@pytest.mark.usefixtures('aiida_profile_clean', 'override_logging') def test_parse_insufficient_data(generate_process): """Test the scheduler output parsing logic in `CalcJob.parse`. @@ -546,7 +546,7 @@ def test_parse_insufficient_data(generate_process): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'override_logging') +@pytest.mark.usefixtures('aiida_profile_clean', 'override_logging') def test_parse_non_zero_retval(generate_process): """Test the scheduler output parsing logic in `CalcJob.parse`. @@ -566,7 +566,7 @@ def test_parse_non_zero_retval(generate_process): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'override_logging') +@pytest.mark.usefixtures('aiida_profile_clean', 'override_logging') def test_parse_not_implemented(generate_process): """Test the scheduler output parsing logic in `CalcJob.parse`. @@ -598,7 +598,7 @@ def test_parse_not_implemented(generate_process): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'override_logging') +@pytest.mark.usefixtures('aiida_profile_clean', 'override_logging') def test_parse_scheduler_excepted(generate_process, monkeypatch): """Test the scheduler output parsing logic in `CalcJob.parse`. 
@@ -643,7 +643,7 @@ def raise_exception(*args, **kwargs): (100, 400, 400), (100, 0, 0), )) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_parse_exit_code_priority( exit_status_scheduler, exit_status_retrieved, @@ -704,7 +704,7 @@ def parse_retrieved_output(_, __): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_additional_retrieve_list(generate_process, fixture_sandbox): """Test the ``additional_retrieve_list`` option.""" process = generate_process() @@ -746,7 +746,7 @@ def test_additional_retrieve_list(generate_process, fixture_sandbox): process = generate_process({'metadata': {'options': {'additional_retrieve_list': ['/abs/path']}}}) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize(('stash_options', 'expected'), ( ({ 'target_base': None diff --git a/tests/engine/processes/workchains/test_restart.py b/tests/engine/processes/workchains/test_restart.py index fb01549342..acd97fcb0c 100644 --- a/tests/engine/processes/workchains/test_restart.py +++ b/tests/engine/processes/workchains/test_restart.py @@ -50,7 +50,7 @@ def test_get_process_handler(): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_excepted_process(generate_work_chain, generate_calculation_node): """Test that the workchain aborts if the sub process was excepted.""" process = generate_work_chain(SomeWorkChain, {}) @@ -60,7 +60,7 @@ def test_excepted_process(generate_work_chain, generate_calculation_node): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_killed_process(generate_work_chain, generate_calculation_node): """Test that the workchain aborts if the sub process was killed.""" process = generate_work_chain(SomeWorkChain, {}) @@ -70,7 +70,7 @@ def test_killed_process(generate_work_chain, generate_calculation_node): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unhandled_failure(generate_work_chain, generate_calculation_node): """Test the unhandled failure mechanism. 
@@ -89,7 +89,7 @@ def test_unhandled_failure(generate_work_chain, generate_calculation_node): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unhandled_reset_after_success(generate_work_chain, generate_calculation_node): """Test `ctx.unhandled_failure` is reset to `False` in `inspect_process` after a successful process.""" process = generate_work_chain(SomeWorkChain, {}) @@ -104,7 +104,7 @@ def test_unhandled_reset_after_success(generate_work_chain, generate_calculation @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unhandled_reset_after_handled(generate_work_chain, generate_calculation_node): """Test `ctx.unhandled_failure` is reset to `False` in `inspect_process` after a handled failed process.""" process = generate_work_chain(SomeWorkChain, {}) @@ -126,7 +126,7 @@ def test_unhandled_reset_after_handled(generate_work_chain, generate_calculation @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_run_process(generate_work_chain, generate_calculation_node, monkeypatch): """Test the `run_process` method.""" diff --git a/tests/engine/test_daemon.py b/tests/engine/test_daemon.py index 53f6b4a20b..915e874ef2 100644 --- a/tests/engine/test_daemon.py +++ b/tests/engine/test_daemon.py @@ -13,7 +13,7 @@ from plumpy.process_states import ProcessState import pytest -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from tests.utils import processes as test_processes @@ -22,7 +22,7 @@ async def reach_waiting_state(process): await asyncio.sleep(0.1) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_cancel_process_task(): """This test is designed to replicate how processes are cancelled in the current `shutdown_runner` callback. 
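Note: the import changes above (`from aiida.manage import get_manager` replacing `from aiida.manage.manager import get_manager`) reflect that the manager is now re-exported from the `aiida.manage` package. A short sketch of the manager accessors these tests rely on (variable names are illustrative):

    from aiida.manage import get_manager

    manager = get_manager()
    profile = manager.get_profile()          # the currently loaded profile
    storage = manager.get_profile_storage()  # replaces the removed get_backend()
    runner = manager.get_runner()            # the runner used by the engine tests above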
diff --git a/tests/engine/test_futures.py b/tests/engine/test_futures.py index 2c751e5d2c..1c63f2e9a0 100644 --- a/tests/engine/test_futures.py +++ b/tests/engine/test_futures.py @@ -14,7 +14,7 @@ from aiida.backends.testbase import AiidaTestCase from aiida.engine import processes, run -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from tests.utils import processes as test_processes diff --git a/tests/engine/test_process.py b/tests/engine/test_process.py index 691e3a3d96..71e1154dfe 100644 --- a/tests/engine/test_process.py +++ b/tests/engine/test_process.py @@ -350,7 +350,7 @@ def test_exposed_outputs(self): from aiida.common import AttributeDict from aiida.common.links import LinkType from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager runner = get_manager().get_runner() @@ -398,7 +398,7 @@ def test_exposed_outputs_non_existing_namespace(self): """Test the ``Process.exposed_outputs`` method for non-existing namespace.""" from aiida.common.links import LinkType from aiida.engine.utils import instantiate_process - from aiida.manage.manager import get_manager + from aiida.manage import get_manager runner = get_manager().get_runner() diff --git a/tests/engine/test_rmq.py b/tests/engine/test_rmq.py index 66128535cb..c54bb56c97 100644 --- a/tests/engine/test_rmq.py +++ b/tests/engine/test_rmq.py @@ -15,7 +15,7 @@ from aiida.backends.testbase import AiidaTestCase from aiida.engine import ProcessState -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import Int from tests.utils import processes as test_processes diff --git a/tests/engine/test_runners.py b/tests/engine/test_runners.py index 8a7fe06603..b079817cd3 100644 --- a/tests/engine/test_runners.py +++ b/tests/engine/test_runners.py @@ -16,7 +16,7 @@ import pytest from aiida.engine import Process -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import WorkflowNode @@ -45,7 +45,7 @@ def the_hans_klok_comeback(loop): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_call_on_process_finish(create_runner): """Test call on calculation finish.""" runner = create_runner() diff --git a/tests/engine/test_work_chain.py b/tests/engine/test_work_chain.py index fa3d5cb2e4..6479b5109f 100644 --- a/tests/engine/test_work_chain.py +++ b/tests/engine/test_work_chain.py @@ -22,7 +22,7 @@ from aiida.common.utils import Capturing from aiida.engine import ExitCode, Process, ToContext, WorkChain, append_, calcfunction, if_, launch, return_, while_ from aiida.engine.persistence import ObjectLoader -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import Bool, Float, Int, Str, load_node diff --git a/tests/manage/configuration/migrations/test_migrations.py b/tests/manage/configuration/migrations/test_migrations.py index a38300e53a..9608aaaaf2 100644 --- a/tests/manage/configuration/migrations/test_migrations.py +++ b/tests/manage/configuration/migrations/test_migrations.py @@ -123,3 +123,10 @@ def test_6_5_migration(self): config_reference = self.load_config_sample('input/5.json') config_migrated = downgrade_config(config_initial, 5) self.assertEqual(config_migrated, config_reference) + + def test_6_7_migration(self): + """Test the step between config versions 6 and 7.""" + config_initial = 
self.load_config_sample('input/6.json') + config_reference = self.load_config_sample('reference/7.json') + config_migrated = upgrade_config(config_initial, 7) + self.assertEqual(config_migrated, config_reference) diff --git a/tests/manage/configuration/migrations/test_samples/input/6.json b/tests/manage/configuration/migrations/test_samples/input/6.json new file mode 100644 index 0000000000..78255f87b8 --- /dev/null +++ b/tests/manage/configuration/migrations/test_samples/input/6.json @@ -0,0 +1,33 @@ +{ + "CONFIG_VERSION": { "CURRENT": 6, "OLDEST_COMPATIBLE": 6 }, + "default_profile": "default", + "profiles": { + "default": { + "PROFILE_UUID": "00000000000000000000000000000000", + "default_user_email": "email@aiida.net", + "storage": { + "backend": "django", + "config": { + "database_engine": "postgresql_psycopg2", + "database_password": "some_random_password", + "database_name": "aiidadb_qs_some_user", + "database_hostname": "localhost", + "database_port": "5432", + "database_username": "aiida_qs_greschd", + "repository_uri": "file:////home/some_user/.aiida/repository-quicksetup/" + } + }, + "process_control": { + "backend": "rabbitmq", + "config": { + "broker_protocol": "amqp", + "broker_username": "guest", + "broker_password": "guest", + "broker_host": "127.0.0.1", + "broker_port": 5672, + "broker_virtual_host": "" + } + } + } + } +} diff --git a/tests/manage/configuration/migrations/test_samples/reference/7.json b/tests/manage/configuration/migrations/test_samples/reference/7.json new file mode 100644 index 0000000000..f8f929f1ce --- /dev/null +++ b/tests/manage/configuration/migrations/test_samples/reference/7.json @@ -0,0 +1,34 @@ +{ + "CONFIG_VERSION": { "CURRENT": 7, "OLDEST_COMPATIBLE": 7 }, + "default_profile": "default", + "profiles": { + "default": { + "PROFILE_UUID": "00000000000000000000000000000000", + "default_user_email": "email@aiida.net", + "storage": { + "_v6_backend": "django", + "backend": "psql_dos", + "config": { + "database_engine": "postgresql_psycopg2", + "database_password": "some_random_password", + "database_name": "aiidadb_qs_some_user", + "database_hostname": "localhost", + "database_port": "5432", + "database_username": "aiida_qs_greschd", + "repository_uri": "file:////home/some_user/.aiida/repository-quicksetup/" + } + }, + "process_control": { + "backend": "rabbitmq", + "config": { + "broker_protocol": "amqp", + "broker_username": "guest", + "broker_password": "guest", + "broker_host": "127.0.0.1", + "broker_port": 5672, + "broker_virtual_host": "" + } + } + } + } +} diff --git a/tests/manage/configuration/migrations/test_samples/reference/final.json b/tests/manage/configuration/migrations/test_samples/reference/final.json index 46e7e96a2d..b493dbf903 100644 --- a/tests/manage/configuration/migrations/test_samples/reference/final.json +++ b/tests/manage/configuration/migrations/test_samples/reference/final.json @@ -1,12 +1,12 @@ { - "CONFIG_VERSION": { "CURRENT": 6, "OLDEST_COMPATIBLE": 6 }, + "CONFIG_VERSION": { "CURRENT": 7, "OLDEST_COMPATIBLE": 7 }, "default_profile": "default", "profiles": { "default": { "default_user_email": "email@aiida.net", "PROFILE_UUID": "00000000000000000000000000000000", "storage": { - "backend": "django", + "backend": "psql_dos", "config": { "database_engine": "postgresql_psycopg2", "database_password": "some_random_password", @@ -15,7 +15,8 @@ "database_port": "5432", "database_username": "aiida_qs_greschd", "repository_uri": "file:////home/some_user/.aiida/repository-quicksetup/" - } + }, + "_v6_backend": "django" }, 
"process_control": { "backend": "rabbitmq", diff --git a/tests/manage/configuration/test_configuration.py b/tests/manage/configuration/test_configuration.py index 7e9bcc3238..2f095fe07a 100644 --- a/tests/manage/configuration/test_configuration.py +++ b/tests/manage/configuration/test_configuration.py @@ -3,7 +3,7 @@ import pytest import aiida -from aiida.manage.configuration import check_version +from aiida.manage.manager import check_version def test_check_version_release(monkeypatch, capsys, isolated_config): diff --git a/tests/manage/configuration/test_options.py b/tests/manage/configuration/test_options.py index 7373739096..40ee872a76 100644 --- a/tests/manage/configuration/test_options.py +++ b/tests/manage/configuration/test_options.py @@ -10,6 +10,7 @@ """Tests for the configuration options.""" import pytest +from aiida import get_profile from aiida.backends.testbase import AiidaTestCase from aiida.common.exceptions import ConfigurationError from aiida.manage.configuration import ConfigValidationError, get_config, get_config_option @@ -66,7 +67,7 @@ def test_get_config_option_default(self): def test_get_config_option_profile_specific(self): """Tests that `get_option` correctly gets a configuration option if specified for the current profile.""" config = get_config() - profile = config.current_profile + profile = get_profile() option_name = 'logging.aiida_loglevel' option_value_profile = 'WARNING' diff --git a/tests/manage/configuration/test_profile.py b/tests/manage/configuration/test_profile.py index 0a398207e5..55a8519a20 100644 --- a/tests/manage/configuration/test_profile.py +++ b/tests/manage/configuration/test_profile.py @@ -28,7 +28,7 @@ def setUpClass(cls, *args, **kwargs): cls.profile_dictionary = { 'default_user_email': 'dummy@localhost', 'storage': { - 'backend': 'django', + 'backend': 'psql_dos', 'config': { 'database_engine': 'postgresql_psycopg2', 'database_name': cls.profile_name, @@ -57,7 +57,7 @@ def test_base_properties(self): """Test the basic properties of a Profile instance.""" self.assertEqual(self.profile.name, self.profile_name) - self.assertEqual(self.profile.storage_backend, 'django') + self.assertEqual(self.profile.storage_backend, 'psql_dos') self.assertEqual(self.profile.storage_config, self.profile_dictionary['storage']['config']) self.assertEqual(self.profile.process_control_backend, 'rabbitmq') self.assertEqual(self.profile.process_control_config, self.profile_dictionary['process_control']['config']) diff --git a/tests/manage/test_caching_config.py b/tests/manage/test_caching_config.py index f44031ea22..f24cd44986 100644 --- a/tests/manage/test_caching_config.py +++ b/tests/manage/test_caching_config.py @@ -49,16 +49,16 @@ def test_merge_deprecated_yaml(tmp_path): An AiidaDeprecationWarning should also be raised. 
""" from aiida.common.warnings import AiidaDeprecationWarning - from aiida.manage import configuration - from aiida.manage.configuration import get_config_option, load_profile, reset_profile, settings + from aiida.manage import configuration, get_manager + from aiida.manage.configuration import get_config_option, load_profile, settings # Store the current configuration instance and config directory path current_config = configuration.CONFIG current_config_path = current_config.dirpath - current_profile_name = configuration.PROFILE.name + current_profile_name = configuration.get_profile().name try: - reset_profile() + get_manager().unload_profile() configuration.CONFIG = None # Create a temporary folder, set it as the current config directory path @@ -90,7 +90,7 @@ def test_merge_deprecated_yaml(tmp_path): finally: # Reset the config folder path and the config instance. Note this will always be executed after the yield no # matter what happened in the test that used this fixture. - reset_profile() + get_manager().unload_profile() settings.AIIDA_CONFIG_FOLDER = current_config_path configuration.CONFIG = current_config load_profile(current_profile_name) diff --git a/tests/manage/test_profile_access.py b/tests/manage/test_profile_access.py index e0606c2531..0d0bd2681e 100644 --- a/tests/manage/test_profile_access.py +++ b/tests/manage/test_profile_access.py @@ -33,7 +33,7 @@ @pytest.fixture(name='profile_access_manager') def fixture_profile_access_manager(): """Create special SQLAlchemy engine for use with QueryBuilder - backend-agnostic""" - from aiida.manage.manager import get_manager + from aiida.manage import get_manager aiida_profile = get_manager().get_profile() return ProfileAccessManager(aiida_profile) @@ -75,7 +75,7 @@ def test_check_methods(profile_access_manager, monkeypatch): are returned by `_get_tracking_files`, and when they are not. """ - def mockfun_return_path(*args, **kwargs): + def mockfun_return_path(*args, **kwargs): # pylint: disable=unused-argument """Mock of _raise_if_locked.""" return [Path('file.txt')] @@ -83,7 +83,7 @@ def mockfun_return_path(*args, **kwargs): assert profile_access_manager.is_active() assert profile_access_manager.is_locked() - def mockfun_return_empty(*args, **kwargs): + def mockfun_return_empty(*args, **kwargs): # pylint: disable=unused-argument """Mock of _raise_if_locked.""" return [] @@ -107,7 +107,7 @@ def test_raise_methods(profile_access_manager, monkeypatch): tempfile = Path(file_stem + '.txt') tempfile.write_text(file_content, encoding='utf-8') - def mock_get_tracking_files(*args, **kwargs): + def mock_get_tracking_files(*args, **kwargs): # pylint: disable=unused-argument """Mock of _raise_if_locked.""" return [tempfile] @@ -166,7 +166,7 @@ def test_clear_stale_pid_files(profile_access_manager): # with actual processes. It is therefore also dependant on the call to # the class in: # -# > aiida.manage.manager::Manager._load_backend +# > aiida.manage.manager::Manager.get_profile_storage() # # Moreover, they also require the use of a separate construct to keep # track of processes accessing aiida profiles with ease (MockProcess). 
diff --git a/tests/orm/data/test_code.py b/tests/orm/data/test_code.py index 51c5f3e057..6b5864229e 100644 --- a/tests/orm/data/test_code.py +++ b/tests/orm/data/test_code.py @@ -15,7 +15,7 @@ from aiida.orm import Code, Computer -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_validate_remote_exec_path(): """Test ``Code.validate_remote_exec_path``.""" computer = Computer( diff --git a/tests/orm/implementation/test_backend.py b/tests/orm/implementation/test_backend.py index 82d3b6f72b..2b2bad67b1 100644 --- a/tests/orm/implementation/test_backend.py +++ b/tests/orm/implementation/test_backend.py @@ -16,12 +16,11 @@ from aiida.orm.entities import EntityTypes -@pytest.mark.usefixtures('clear_database_before_test') class TestBackend: """Test backend.""" @pytest.fixture(autouse=True) - def init_test(self, backend): + def init_test(self, aiida_profile_clean, backend): # pylint: disable=unused-argument """Set up the backend.""" self.backend = backend # pylint: disable=attribute-defined-outside-init diff --git a/tests/orm/implementation/test_groups.py b/tests/orm/implementation/test_groups.py index 20a6a2e92a..55556ffd5b 100644 --- a/tests/orm/implementation/test_groups.py +++ b/tests/orm/implementation/test_groups.py @@ -13,7 +13,7 @@ from aiida import orm -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_creation_from_dbgroup(backend): """Test creation of a group from another group.""" node = orm.Data().store() @@ -31,7 +31,7 @@ def test_creation_from_dbgroup(backend): assert group.uuid == gcopy.uuid -@pytest.mark.usefixtures('clear_database_before_test', 'skip_if_not_sqlalchemy') +@pytest.mark.usefixtures('aiida_profile_clean') def test_add_nodes_skip_orm(): """Test the `SqlaGroup.add_nodes` method with the `skip_orm=True` flag.""" group = orm.Group(label='test_adding_nodes').store().backend_entity @@ -54,7 +54,7 @@ def test_add_nodes_skip_orm(): assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes) -@pytest.mark.usefixtures('clear_database_before_test', 'skip_if_not_sqlalchemy') +@pytest.mark.usefixtures('aiida_profile_clean') def test_add_nodes_skip_orm_batch(): """Test the `SqlaGroup.add_nodes` method with the `skip_orm=True` flag and batches.""" nodes = [orm.Data().store().backend_entity for _ in range(100)] @@ -67,7 +67,7 @@ def test_add_nodes_skip_orm_batch(): assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes) -@pytest.mark.usefixtures('clear_database_before_test', 'skip_if_not_sqlalchemy') +@pytest.mark.usefixtures('aiida_profile_clean') def test_remove_nodes_bulk(): """Test node removal with `skip_orm=True`.""" group = orm.Group(label='test_removing_nodes').store().backend_entity diff --git a/tests/orm/nodes/data/test_array.py b/tests/orm/nodes/data/test_array.py index b575f721c5..92b50e1510 100644 --- a/tests/orm/nodes/data/test_array.py +++ b/tests/orm/nodes/data/test_array.py @@ -14,7 +14,7 @@ from aiida.orm import ArrayData, load_node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_read_stored(): """Test reading an array from an ``ArrayData`` after storing and loading it.""" array = numpy.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) diff --git a/tests/orm/nodes/data/test_array_bands.py b/tests/orm/nodes/data/test_array_bands.py index eacd593f2f..44d655326d 100644 --- a/tests/orm/nodes/data/test_array_bands.py +++ b/tests/orm/nodes/data/test_array_bands.py @@ -45,7 +45,7 @@ 
def _get_default_ns(): return args @pytest.mark.parametrize('all_users, expected', ((True, [True, True]), (False, [True, False]))) - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_all_users(self, alternate_user, all_users, expected): """Test the behavior for the ``all_users`` argument.""" bands_default_user = BandsData().store() @@ -60,7 +60,7 @@ def test_all_users(self, alternate_user, all_users, expected): assert [node.pk in node_pks for node in bands] == expected @pytest.mark.parametrize('argument, attribute', (('group_name', 'label'), ('group_pk', 'pk'))) - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_identifier(self, argument, attribute): """Test the behavior for the ``group_name`` and ``group_pk`` arguments.""" bands_data_grouped = BandsData().store() diff --git a/tests/orm/nodes/data/test_base.py b/tests/orm/nodes/data/test_base.py index f70ff8b39f..093bcd7c03 100644 --- a/tests/orm/nodes/data/test_base.py +++ b/tests/orm/nodes/data/test_base.py @@ -17,7 +17,7 @@ from aiida.orm import Bool, Float, Int, NumericType, Str, load_node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize( 'node_type, default, value', [ (Bool, False, True), @@ -36,7 +36,7 @@ def test_create(node_type, default, value): assert node.value == value -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type', [Bool, Float, Int, Str]) def test_store_load(node_type): """Test ``BaseType`` node storing and loading.""" @@ -46,7 +46,7 @@ def test_store_load(node_type): assert node.value == loaded.value -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_modulo(): """Test ``Int`` modulus operation.""" term_a = Int(12) @@ -60,7 +60,7 @@ def test_modulo(): assert isinstance(12 % term_b, NumericType) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Int, 3, 5), (Float, 1.2, 5.5), @@ -90,7 +90,7 @@ def test_add(node_type, a, b): assert result.value == a + b -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Int, 3, 5), (Float, 1.2, 5.5), @@ -121,12 +121,12 @@ def test_multiplication(node_type, a, b): assert result.value == a * b -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Int, 3, 5), (Float, 1.2, 5.5), ]) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_division(node_type, a, b): """Test the ``BaseType`` normal division operator.""" node_a = node_type(a) @@ -137,12 +137,12 @@ def test_division(node_type, a, b): assert isinstance(result, Float) # Should be a `Float` for both node types -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Int, 3, 5), (Float, 1.2, 5.5), ]) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_division_integer(node_type, a, b): """Test the ``Int`` integer division operator.""" node_a = node_type(a) @@ -153,7 +153,7 @@ 
def test_division_integer(node_type, a, b): assert isinstance(result, node_type) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, base, power', [ (Int, 5, 2), (Float, 3.5, 3), @@ -168,7 +168,7 @@ def test_power(node_type, base, power): assert isinstance(result, node_type) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Int, 5, 2), (Float, 3.5, 3), @@ -188,7 +188,7 @@ def test_modulus(node_type, a, b): assert isinstance(a % node_b, node_type) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize( 'opera', [ operator.add, operator.mul, operator.pow, operator.lt, operator.le, operator.gt, operator.ge, operator.iadd, @@ -207,7 +207,7 @@ def test_operator(opera): assert res == opera(node_x.value, node_y.value) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('node_type, a, b', [ (Bool, False, True), (Int, 2, 5), diff --git a/tests/orm/nodes/data/test_data.py b/tests/orm/nodes/data/test_data.py index 77499e6e19..5293ff6444 100644 --- a/tests/orm/nodes/data/test_data.py +++ b/tests/orm/nodes/data/test_data.py @@ -19,7 +19,7 @@ @pytest.fixture -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def generate_class_instance(): """Generate a dummy `Data` instance for the given sub class.""" @@ -75,7 +75,7 @@ def data_plugin(request): return request.param.load() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_constructor(): """Test the constructor. @@ -87,7 +87,7 @@ def test_constructor(): assert node.source == source -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_data_exporters(data_plugin, generate_class_instance): """Verify that the return value of the export methods of all `Data` sub classes have the correct type. 
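Note: the fixture renames above leave the base-type operator tests of `test_base.py` otherwise untouched. A condensed sketch of the behaviour those tests assert (values are illustrative):

    from aiida.orm import Float, Int

    # Arithmetic on base-type nodes returns new, unstored nodes wrapping the result.
    total = Int(3) + Int(5)
    assert total.value == 8
    quotient = Int(3) / Int(5)
    assert isinstance(quotient, Float)  # true division yields a Float for both Int and Float operands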
diff --git a/tests/orm/nodes/data/test_dict.py b/tests/orm/nodes/data/test_dict.py index 350492c97f..ad78d2af9d 100644 --- a/tests/orm/nodes/data/test_dict.py +++ b/tests/orm/nodes/data/test_dict.py @@ -19,28 +19,28 @@ def dictionary(): return {'value': 1, 'nested': {'dict': 'ionary'}} -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_keys(dictionary): """Test the ``keys`` method.""" node = Dict(dictionary) assert sorted(node.keys()) == sorted(dictionary.keys()) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_items(dictionary): """Test the ``items`` method.""" node = Dict(dictionary) assert sorted(node.items()) == sorted(dictionary.items()) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_dict(dictionary): """Test the ``get_dict`` method.""" node = Dict(dictionary) assert node.get_dict() == dictionary -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_dict_property(dictionary): """Test the ``dict`` property.""" node = Dict(dictionary) @@ -48,7 +48,7 @@ def test_dict_property(dictionary): assert node.dict.nested == dictionary['nested'] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_item(dictionary): """Test the ``__getitem__`` method.""" node = Dict(dictionary) @@ -56,7 +56,7 @@ def test_get_item(dictionary): assert node['nested'] == dictionary['nested'] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_set_item(dictionary): """Test the methods for setting the item. @@ -72,7 +72,7 @@ def test_set_item(dictionary): assert node['value'] == 3 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('key, expected', (('value', True), ('non-existing', False))) def test_contains(dictionary, key, expected): """Test the ``__contains__`` implementation.""" @@ -84,7 +84,7 @@ def test_contains(dictionary, key, expected): assert (key in node) is expected -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_correct_raises(dictionary): """Test that the methods for accessing the item raise the correct error. @@ -100,7 +100,7 @@ def test_correct_raises(dictionary): _ = node.dict.inexistent_key -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_equality(dictionary): """Test the equality comparison for the ``Dict`` type. 
@@ -129,7 +129,7 @@ def test_equality(dictionary): assert node != different_node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_initialise_with_dict_kwarg(dictionary): """Test that the ``Dict`` node can be initialized with the ``dict`` keyword argument for backwards compatibility.""" node = Dict(dict=dictionary) diff --git a/tests/orm/nodes/data/test_folder.py b/tests/orm/nodes/data/test_folder.py index 17aade1904..fdfb61b8f7 100644 --- a/tests/orm/nodes/data/test_folder.py +++ b/tests/orm/nodes/data/test_folder.py @@ -8,36 +8,19 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Tests for the `FolderData` class.""" +import pytest -import os -import shutil -import tempfile - -from aiida.backends.testbase import AiidaTestCase from aiida.orm import FolderData -class TestFolderData(AiidaTestCase): - """Test for the `FolderData` class.""" - - @classmethod - def setUpClass(cls, *args, **kwargs): - super().setUpClass(*args, **kwargs) - cls.tempdir = tempfile.mkdtemp() - cls.tree = { - 'a.txt': 'Content of file A\nWith some newlines', - 'b.txt': 'Content of file B without newline', - } - - for filename, content in cls.tree.items(): - with open(os.path.join(cls.tempdir, filename), 'w', encoding='utf8') as handle: - handle.write(content) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.tempdir) - - def test_constructor_tree(self): - """Test the `tree` constructor keyword.""" - node = FolderData(tree=self.tempdir) - self.assertEqual(sorted(node.list_object_names()), sorted(self.tree.keys())) +@pytest.mark.usefixtures('aiida_profile_clean') +def test_constructor_tree(tmp_path): + """Test the `tree` constructor keyword.""" + tree = { + 'a.txt': 'Content of file A\nWith some newlines', + 'b.txt': 'Content of file B without newline', + } + for filename, content in tree.items(): + tmp_path.joinpath(filename).write_text(content, encoding='utf8') + node = FolderData(tree=str(tmp_path)) + assert sorted(node.list_object_names()) == sorted(tree.keys()) diff --git a/tests/orm/nodes/data/test_jsonable.py b/tests/orm/nodes/data/test_jsonable.py index fe185d5da8..f4cc9bdb1c 100644 --- a/tests/orm/nodes/data/test_jsonable.py +++ b/tests/orm/nodes/data/test_jsonable.py @@ -68,7 +68,7 @@ def test_invalid_class_not_serializable(): JsonableData(obj) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_store(): """Test storing a ``JsonableData`` instance.""" data = {'a': 1} @@ -80,7 +80,7 @@ def test_store(): assert node.is_stored -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_load(): """Test loading a ``JsonableData`` instance.""" data = {'a': 1} @@ -93,7 +93,7 @@ def test_load(): assert loaded == node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_obj(): """Test the ``JsonableData.obj`` property.""" data = [1, float('inf'), float('-inf'), float('nan')] @@ -117,7 +117,7 @@ def test_obj(): assert left == right -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unimportable_module(): """Test the ``JsonableData.obj`` property if the associated module cannot be loaded.""" obj = Molecule(['H'], [[0, 0, 0]]) @@ -133,7 +133,7 @@ def test_unimportable_module(): _ = loaded.obj 
-@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unimportable_class(): """Test the ``JsonableData.obj`` property if the associated class cannot be loaded.""" obj = Molecule(['H'], [[0, 0, 0]]) @@ -149,7 +149,7 @@ def test_unimportable_class(): _ = loaded.obj -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_msonable(): """Test that an ``MSONAble`` object can be wrapped, stored and loaded again.""" obj = Molecule(['H'], [[0, 0, 0]]) diff --git a/tests/orm/nodes/data/test_list.py b/tests/orm/nodes/data/test_list.py index 41ff099d1a..a64e31c9e3 100644 --- a/tests/orm/nodes/data/test_list.py +++ b/tests/orm/nodes/data/test_list.py @@ -25,7 +25,7 @@ def int_listing(): return [2, 1, 3] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_creation(): """Test the creation of an empty ``List`` node.""" node = List() @@ -34,7 +34,7 @@ def test_creation(): node[0] # pylint: disable=pointless-statement -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_mutability(): """Test list's mutability before and after storage.""" node = List() @@ -58,7 +58,7 @@ def test_mutability(): node.reverse() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_store_load(listing): """Test load_node on just stored object.""" node = List(listing) @@ -68,7 +68,7 @@ def test_store_load(listing): assert node.get_list() == node_loaded.get_list() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_special_methods(listing): """Test the special methods of the ``List`` class.""" node = List(listing) @@ -89,7 +89,7 @@ def test_special_methods(listing): assert len(node) == 2 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_equality(listing): """Test equality comparison for ``List`` nodes.""" different_list = ['I', 'am', 'different'] @@ -106,7 +106,7 @@ def test_equality(listing): assert node != different_node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_append(listing): """Test the ``List.append()`` method.""" @@ -127,7 +127,7 @@ def do_checks(node): assert node[-1] == 'more' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_extend(listing): """Test extend() member function.""" @@ -150,7 +150,7 @@ def do_checks(node, lst): do_checks(node, listing * 2) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_insert(listing): """Test the ``List.insert()`` method.""" node = List(listing) @@ -159,7 +159,7 @@ def test_insert(listing): assert len(node) == 4 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_remove(listing): """Test the ``List.remove()`` method.""" node = List(listing) @@ -171,7 +171,7 @@ def test_remove(listing): node.remove('non-existent') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_pop(listing): """Test the ``List.pop()`` method.""" node = List(listing) @@ -179,7 +179,7 @@ def test_pop(listing): assert node.get_list() == listing[:-1] 
-@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_index(listing): """Test the ``List.index()`` method.""" node = List(listing) @@ -187,7 +187,7 @@ assert node.index(True) == listing.index(True) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_count(listing): """Test the ``List.count()`` method.""" node = List(listing) @@ -195,7 +195,7 @@ assert node.count(value) == listing.count(value) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_sort(listing, int_listing): """Test the ``List.sort()`` method.""" node = List(int_listing) @@ -208,7 +208,7 @@ node.sort() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_reverse(listing): """Test the ``List.reverse()`` method.""" node = List(listing) @@ -217,7 +217,7 @@ assert node.get_list() == listing -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_initialise_with_list_kwarg(listing): """Test that the ``List`` node can be initialized with the ``list`` keyword argument for backwards compatibility.""" node = List(list=listing) diff --git a/tests/orm/nodes/data/test_remote.py b/tests/orm/nodes/data/test_remote.py index 3e98d4179a..8746deb1ae 100644 --- a/tests/orm/nodes/data/test_remote.py +++ b/tests/orm/nodes/data/test_remote.py @@ -24,7 +24,7 @@ def remote_data(tmp_path, aiida_localhost): return node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_clean(remote_data): """Test the :meth:`aiida.orm.nodes.data.remote.base.RemoteData.clean` method.""" assert not remote_data.is_empty diff --git a/tests/orm/nodes/data/test_remote_stash.py b/tests/orm/nodes/data/test_remote_stash.py index 45318ca1b3..be51fa61cc 100644 --- a/tests/orm/nodes/data/test_remote_stash.py +++ b/tests/orm/nodes/data/test_remote_stash.py @@ -15,7 +15,7 @@ from aiida.orm import RemoteStashData, RemoteStashFolderData -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_base_class(): """Verify that base class cannot be stored.""" node = RemoteStashData(stash_mode=StashMode.COPY) @@ -24,7 +24,7 @@ node.store() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize('store', (False, True)) def test_constructor(store): """Test the constructor and storing functionality.""" @@ -46,7 +46,7 @@ assert data.source_list == source_list -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize( 'argument, value', ( ('stash_mode', 'copy'), diff --git a/tests/orm/nodes/data/test_singlefile.py b/tests/orm/nodes/data/test_singlefile.py index c1e2cdbe59..c33be71e5b 100644 --- a/tests/orm/nodes/data/test_singlefile.py +++ b/tests/orm/nodes/data/test_singlefile.py @@ -63,7 +63,7 @@ def inner(node, content_reference, filename, open_mode='r'): def test_reload_singlefile_data( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content_with_store, # pylint:
disable=redefined-outer-name check_singlefile_content # pylint: disable=redefined-outer-name ): @@ -99,7 +99,7 @@ def test_reload_singlefile_data( def test_construct_from_filelike( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content_with_store # pylint: disable=redefined-outer-name ): """Test constructing an instance from filelike instead of filepath.""" @@ -120,7 +120,7 @@ def test_construct_from_filelike( def test_construct_from_string( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content_with_store # pylint: disable=redefined-outer-name ): """Test constructing an instance from a string.""" @@ -137,7 +137,7 @@ def test_construct_from_string( def test_construct_with_path( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content_with_store # pylint: disable=redefined-outer-name ): """Test constructing an instance from a pathlib.Path.""" @@ -158,7 +158,7 @@ def test_construct_with_path( def test_construct_with_filename( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content # pylint: disable=redefined-outer-name ): """Test constructing an instance, providing a filename.""" @@ -182,7 +182,7 @@ def test_construct_with_filename( def test_binary_file( - clear_database_before_test, # pylint: disable=unused-argument + aiida_profile_clean, # pylint: disable=unused-argument check_singlefile_content_with_store # pylint: disable=redefined-outer-name ): """Test that the constructor accepts binary files.""" diff --git a/tests/orm/nodes/data/test_to_aiida_type.py b/tests/orm/nodes/data/test_to_aiida_type.py index 06ef09ca17..51bcba874a 100644 --- a/tests/orm/nodes/data/test_to_aiida_type.py +++ b/tests/orm/nodes/data/test_to_aiida_type.py @@ -15,7 +15,7 @@ #yapf: disable -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') @pytest.mark.parametrize( 'expected_type, value', ( (orm.Bool, True), diff --git a/tests/orm/nodes/test_node.py b/tests/orm/nodes/test_node.py index 55768d493d..85574b8c19 100644 --- a/tests/orm/nodes/test_node.py +++ b/tests/orm/nodes/test_node.py @@ -18,12 +18,12 @@ import pytest from aiida.common import LinkType, exceptions, timezone -from aiida.manage.manager import get_manager +from aiida.manage import get_manager from aiida.orm import CalculationNode, Computer, Data, Log, Node, User, WorkflowNode, load_node from aiida.orm.utils.links import LinkTriple -@pytest.mark.usefixtures('clear_database_before_test_class') +@pytest.mark.usefixtures('aiida_profile_clean_class') class TestNode: """Tests for generic node functionality.""" @@ -115,7 +115,7 @@ def test_process_class_raises(process_type, match): node.process_class # pylint: disable=pointless-statement -@pytest.mark.usefixtures('clear_database_before_test_class') +@pytest.mark.usefixtures('aiida_profile_clean_class') class TestNodeAttributesExtras: """Test for node attributes and extras.""" @@ -454,7 +454,7 @@ def test_attribute_decimal(self): assert self.node.get_attribute('a_val') == 3.141 -@pytest.mark.usefixtures('clear_database_before_test_class') +@pytest.mark.usefixtures('aiida_profile_clean_class') class TestNodeLinks: """Test for linking from and to Node.""" @@ -828,10 +828,10 @@ class TestNodeDelete: """Tests 
for deleting nodes.""" # pylint: disable=no-member,no-self-use - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_delete_through_backend(self): """Test deletion works correctly through the backend.""" - backend = get_manager().get_backend() + backend = get_manager().get_profile_storage() data_one = Data().store() data_two = Data().store() @@ -855,7 +855,7 @@ def test_delete_through_backend(self): assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk assert len(Log.objects.get_logs_for(data_two)) == 0 - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_delete_collection_logs(self): """Test deletion works correctly through objects collection.""" data_one = Data().store() @@ -875,7 +875,7 @@ def test_delete_collection_logs(self): assert Log.objects.get_logs_for(data_one)[0].pk == log_one.pk assert len(Log.objects.get_logs_for(data_two)) == 0 - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_delete_collection_incoming_link(self): """Test deletion through objects collection raises when there are incoming links.""" data = Data().store() @@ -886,7 +886,7 @@ def test_delete_collection_incoming_link(self): with pytest.raises(exceptions.InvalidOperation): Node.objects.delete(calculation.pk) - @pytest.mark.usefixtures('clear_database_before_test') + @pytest.mark.usefixtures('aiida_profile_clean') def test_delete_collection_outgoing_link(self): """Test deletion through objects collection raises when there are outgoing links.""" calculation = CalculationNode().store() @@ -898,7 +898,7 @@ def test_delete_collection_outgoing_link(self): Node.objects.delete(calculation.pk) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestNodeComments: """Tests for creating comments on nodes.""" @@ -943,7 +943,7 @@ def test_remove_comment(self): assert len(data.get_comments()) == 0 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestNodeCaching: """Tests the caching behavior of the ``Node`` class.""" @@ -1006,7 +1006,7 @@ def test_uuid_equality_fallback(self): assert hash(node_b) != hash(node_0) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_iter_repo_keys(): """Test the ``iter_repo_keys`` method.""" data1 = Data() diff --git a/tests/orm/nodes/test_repository.py b/tests/orm/nodes/test_repository.py index ba185ba025..c876d2f48d 100644 --- a/tests/orm/nodes/test_repository.py +++ b/tests/orm/nodes/test_repository.py @@ -26,7 +26,7 @@ def cacheable_node(): return node -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_initialization(): """Test that the repository instance is lazily constructed.""" node = Data() @@ -37,7 +37,7 @@ def test_initialization(): assert isinstance(node._repository.backend, SandboxRepositoryBackend) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_unstored(): """Test the repository for unstored nodes.""" node = Data() @@ -47,7 +47,7 @@ def test_unstored(): assert node.repository_metadata == {} -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_store(): """Test the repository after storing.""" node = Data() @@ -64,7 +64,7 
@@ def test_store(): assert node._repository.hash() == hash_unstored -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_load(): """Test the repository after loading.""" node = Data() @@ -80,7 +80,7 @@ def test_load(): assert loaded._repository.hash() == hash_stored -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_load_updated(): """Test the repository after loading.""" node = CalcJobNode() @@ -91,7 +91,7 @@ def test_load_updated(): assert loaded.get_object_content('relative/path', mode='rb') == b'content' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_caching(cacheable_node): """Test the repository after a node is stored from the cache.""" @@ -106,7 +106,7 @@ def test_caching(cacheable_node): assert cacheable_node._repository.hash() == cached._repository.hash() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_clone(): """Test the repository after a node is cloned from a stored node.""" node = Data() @@ -124,7 +124,7 @@ def test_clone(): assert clone._repository.hash() == node._repository.hash() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_clone_unstored(): """Test the repository after a node is cloned from an unstored node.""" node = Data() @@ -139,7 +139,7 @@ def test_clone_unstored(): assert clone.get_object_content('relative/path', mode='rb') == b'content' -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_sealed(): """Test the repository interface for a calculation node before and after it is sealed.""" node = CalcJobNode() @@ -151,7 +151,7 @@ def test_sealed(): node.put_object_from_filelike(io.BytesIO(b'content'), 'path') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_object_raises(): """Test the ``NodeRepositoryMixin.get_object`` method when it is supposed to raise.""" node = Data() @@ -166,7 +166,7 @@ def test_get_object_raises(): node.get_object('non_existant') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_get_object(): """Test the ``NodeRepositoryMixin.get_object`` method.""" node = CalcJobNode() @@ -191,7 +191,7 @@ def test_get_object(): assert file_object.is_dir() is False -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_walk(): """Test the ``NodeRepositoryMixin.walk`` method.""" node = Data() @@ -219,7 +219,7 @@ def test_walk(): ] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_glob(): """Test the ``NodeRepositoryMixin.glob`` method.""" node = Data() @@ -228,7 +228,7 @@ def test_glob(): assert {path.as_posix() for path in node.glob()} == {'relative', 'relative/path'} -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_copy_tree(tmp_path): """Test the ``Repository.copy_tree`` method.""" node = Data() diff --git a/tests/orm/test_autogroups.py b/tests/orm/test_autogroups.py index e75915395f..e685ed6aed 100644 --- a/tests/orm/test_autogroups.py +++ b/tests/orm/test_autogroups.py @@ -8,108 +8,98 @@ # For further information please visit http://www.aiida.net # 
########################################################################### """Tests for the Autogroup functionality.""" -from aiida.backends.testbase import AiidaTestCase from aiida.orm import AutoGroup, QueryBuilder -from aiida.orm.autogroup import Autogroup +from aiida.orm.autogroup import AutogroupManager -class TestAutogroup(AiidaTestCase): - """Tests the Autogroup logic.""" +def test_get_or_create(backend): + """Test the ``get_or_create_group`` method of ``Autogroup``.""" + label_prefix = 'test_prefix_TestAutogroup' - def test_get_or_create(self): - """Test the ``get_or_create_group`` method of ``Autogroup``.""" - label_prefix = 'test_prefix_TestAutogroup' + # Check that there are no groups to begin with + queryb = QueryBuilder(backend).append(AutoGroup, filters={'label': label_prefix}) + assert not list(queryb.all()) + queryb = QueryBuilder(backend).append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}}) + assert not list(queryb.all()) - # Check that there are no groups to begin with - queryb = QueryBuilder().append(AutoGroup, filters={'label': label_prefix}) - assert not list(queryb.all()) - queryb = QueryBuilder().append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}}) - assert not list(queryb.all()) + # First group (no existing one) + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = label_prefix + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # First group (no existing one) - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = label_prefix - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # Second group (only one with no suffix existing) + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = f'{label_prefix}_1' + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # Second group (only one with no suffix existing) - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = f'{label_prefix}_1' - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # Second group (only one suffix _1 existing) + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = f'{label_prefix}_2' + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # Second group (only one suffix _1 existing) - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = f'{label_prefix}_2' - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # I create a group with a large integer suffix (9) + AutoGroup(label=f'{label_prefix}_9').store() + # The next autogroup should become number 10 + autogroup = AutogroupManager(backend) + 
autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = f'{label_prefix}_10' + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # I create a group with a large integer suffix (9) - AutoGroup(label=f'{label_prefix}_9').store() - # The next autogroup should become number 10 - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = f'{label_prefix}_10' - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # I create a group with a non-integer suffix (15a), it should be ignored + AutoGroup(label=f'{label_prefix}_15b').store() + # The next autogroup should become number 11 + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = f'{label_prefix}_11' + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # I create a group with a non-integer suffix (15a), it should be ignored - AutoGroup(label=f'{label_prefix}_15b').store() - # The next autogroup should become number 11 - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = f'{label_prefix}_11' - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) - def test_get_or_create_invalid_prefix(self): - """Test the ``get_or_create_group`` method of ``Autogroup`` when there is already a group - with the same prefix, but followed by other non-underscore characters.""" - label_prefix = 'new_test_prefix_TestAutogroup' - # I create a group with the same prefix, but followed by non-underscore - # characters. These should be ignored in the logic. - AutoGroup(label=f'{label_prefix}xx').store() +def test_get_or_create_invalid_prefix(backend): + """Test the ``get_or_create_group`` method of ``Autogroup`` when there is already a group + with the same prefix, but followed by other non-underscore characters.""" + label_prefix = 'new_test_prefix_TestAutogroup' + # I create a group with the same prefix, but followed by non-underscore + # characters. These should be ignored in the logic. 
+ AutoGroup(label=f'{label_prefix}xx').store() - # Check that there are no groups to begin with - queryb = QueryBuilder().append(AutoGroup, filters={'label': label_prefix}) - assert not list(queryb.all()) - queryb = QueryBuilder().append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}}) - assert not list(queryb.all()) + # Check that there are no groups to begin with + queryb = QueryBuilder(backend).append(AutoGroup, filters={'label': label_prefix}) + assert not list(queryb.all()) + queryb = QueryBuilder(backend).append(AutoGroup, filters={'label': {'like': r'{}\_%'.format(label_prefix)}}) + assert not list(queryb.all()) - # First group (no existing one) - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = label_prefix - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # First group (no existing one) + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = label_prefix + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) - # Second group (only one with no suffix existing) - autogroup = Autogroup() - autogroup.set_group_label_prefix(label_prefix) - group = autogroup.get_or_create_group() - expected_label = f'{label_prefix}_1' - self.assertEqual( - group.label, expected_label, - f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" - ) + # Second group (only one with no suffix existing) + autogroup = AutogroupManager(backend) + autogroup.set_group_label_prefix(label_prefix) + group = autogroup.get_or_create_group() + expected_label = f'{label_prefix}_1' + assert group.label == expected_label, ( + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" + ) diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py index 206f5f4ba5..9ad8cdc8b5 100644 --- a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -19,12 +19,11 @@ from aiida import orm, plugins from aiida.common.links import LinkType -from aiida.manage import configuration from aiida.orm.querybuilder import _get_ormclass from aiida.orm.utils.links import LinkQuadruple -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestBasic: def test_date_filters_support(self): @@ -695,7 +694,7 @@ def test_query_links(self): assert builder.one()[0] == LinkQuadruple(d2.id, c2.id, LinkType.INPUT_CALC.value, 'link_d2c2') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestMultipleProjections: """Unit tests for the QueryBuilder ORM class.""" @@ -717,7 +716,7 @@ class TestRepresentations: """Test representing the query in different formats.""" @pytest.fixture(autouse=True) - def init_db(self, clear_database_before_test, data_regression, file_regression): + def init_db(self, aiida_profile_clean, data_regression, file_regression): self.regress_dict = data_regression.check self.regress_str = file_regression.check @@ -804,7 +803,7 @@ def test_round_trip_append(self): assert sorted([uuid for uuid, in qb.all()]) == sorted([uuid for uuid, in qb_new.all()]) -def test_analyze_query(clear_database_before_test): +def test_analyze_query(aiida_profile_clean): """Test the query 
plan is correctly generated.""" qb = orm.QueryBuilder() # include literal values in test @@ -814,7 +813,7 @@ def test_analyze_query(clear_database_before_test): assert 'uuid' in analysis_str, analysis_str -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestQueryBuilderCornerCases: """ In this class corner cases of QueryBuilder are added. @@ -849,7 +848,7 @@ def test_empty_filters(self): assert qb.count() == 1 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestAttributes: def test_attribute_existence(self): @@ -924,17 +923,12 @@ def test_attribute_type(self): qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'==': '1'}}, project='uuid') res = [str(_) for _, in qb.all()] assert set(res) == set((n_str.uuid,)) - if configuration.PROFILE.storage_backend == 'sqlalchemy': - # I can't query the length of an array with Django, - # so I exclude. Not the nicest way, But I would like to keep this piece - # of code because of the initialization part, that would need to be - # duplicated or wrapped otherwise. - qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'of_length': 3}}, project='uuid') - res = [str(_) for _, in qb.all()] - assert set(res) == set((n_arr.uuid,)) + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'of_length': 3}}, project='uuid') + res = [str(_) for _, in qb.all()] + assert set(res) == set((n_arr.uuid,)) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestQueryBuilderLimitOffsets: def test_ordering_limits_offsets_of_results_general(self): @@ -1002,7 +996,7 @@ def test_ordering_limits_offsets_of_results_general(self): assert res == tuple(range(4, 1, -1)) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestQueryBuilderJoins: def test_joins_node_incoming(self): @@ -1190,7 +1184,7 @@ def test_joins_group_node(self): class QueryBuilderPath: @pytest.fixture(autouse=True) - def init_db(self, clear_database_before_test, backend): + def init_db(self, aiida_profile_clean, backend): self.backend = backend @staticmethod @@ -1392,7 +1386,7 @@ def test_query_path(self): # self.assertTrue(set(next(zip(*qb.all()))), set([5])) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') class TestConsistency: def test_create_node_and_query(self): @@ -1433,7 +1427,7 @@ def test_len_results(self): class TestManager: @pytest.fixture(autouse=True) - def init_db(self, clear_database_before_test, backend): + def init_db(self, aiida_profile_clean, backend): self.backend = backend def test_statistics(self): @@ -1519,7 +1513,7 @@ class TestDoubleStar: """ @pytest.fixture(autouse=True) - def init_db(self, clear_database_before_test, aiida_localhost): + def init_db(self, aiida_profile_clean, aiida_localhost): self.computer = aiida_localhost def test_authinfo(self): diff --git a/tests/orm/utils/test_calcjob.py b/tests/orm/utils/test_calcjob.py index 51bed44625..e8534bcf7a 100644 --- a/tests/orm/utils/test_calcjob.py +++ b/tests/orm/utils/test_calcjob.py @@ -17,7 +17,7 @@ @pytest.fixture -def get_calcjob_node(clear_database_before_test, generate_calculation_node): +def get_calcjob_node(aiida_profile_clean, generate_calculation_node): """Return a calculation node with `Dict` output with default output label and the dictionary it contains.""" node = 
generate_calculation_node(entry_point='aiida.calculations:core.templatereplacer').store() dictionary = { @@ -31,7 +31,7 @@ def get_calcjob_node(clear_database_before_test, generate_calculation_node): return node, dictionary -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_no_process_type(generate_calculation_node): """`get_results` should raise `ValueError` if `CalcJobNode` has no `process_type`""" node = generate_calculation_node() @@ -41,7 +41,7 @@ def test_no_process_type(generate_calculation_node): manager.get_results() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_invalid_process_type(generate_calculation_node): """`get_results` should raise `ValueError` if `CalcJobNode` has invalid `process_type`""" node = generate_calculation_node(entry_point='aiida.calculations:invalid') @@ -51,7 +51,7 @@ def test_invalid_process_type(generate_calculation_node): manager.get_results() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_process_class_no_default_node(generate_calculation_node): """`get_results` should raise `ValueError` if process class does not define default output node.""" # This is a valid process class however ArithmeticAddCalculation does define a default output node @@ -62,7 +62,7 @@ def test_process_class_no_default_node(generate_calculation_node): manager.get_results() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_iterator(get_calcjob_node): """Test that the manager can be iterated over.""" node, dictionary = get_calcjob_node @@ -71,7 +71,7 @@ def test_iterator(get_calcjob_node): assert key in dictionary.keys() -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_getitem(get_calcjob_node): """Test that the manager supports the getitem operator.""" node, dictionary = get_calcjob_node @@ -84,7 +84,7 @@ def test_getitem(get_calcjob_node): assert manager['non-existent-key'] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_getitem_no_results(generate_calculation_node): """Test that `getitem` raises `KeyError` if no results can be retrieved whatsoever e.g. there is no output.""" node = generate_calculation_node() @@ -94,7 +94,7 @@ def test_getitem_no_results(generate_calculation_node): assert manager['key'] -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_getattr(get_calcjob_node): """Test that the manager supports the getattr operator.""" node, dictionary = get_calcjob_node @@ -107,7 +107,7 @@ def test_getattr(get_calcjob_node): assert getattr(manager, 'non-existent-key') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_getattr_no_results(generate_calculation_node): """Test that `getattr` raises `AttributeError` if no results can be retrieved whatsoever e.g. 
there is no output.""" node = generate_calculation_node() @@ -117,7 +117,7 @@ def test_getattr_no_results(generate_calculation_node): assert getattr(manager, 'key') -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_dir(get_calcjob_node): """Test that `dir` returns all keys of the dictionary and nothing else.""" node, dictionary = get_calcjob_node diff --git a/tests/orm/utils/test_managers.py b/tests/orm/utils/test_managers.py index 97b0c95003..64f3f50d9a 100644 --- a/tests/orm/utils/test_managers.py +++ b/tests/orm/utils/test_managers.py @@ -16,7 +16,7 @@ from aiida.common.exceptions import NotExistent, NotExistentAttributeError, NotExistentKeyError -def test_dot_dict_manager(clear_database_before_test): +def test_dot_dict_manager(aiida_profile_clean): """Verify that the Dict.dict manager behaves as intended.""" dict_content = {'a': True, 'b': 1, 'c': 'Some string'} dict_node = orm.Dict(dict=dict_content) @@ -47,7 +47,7 @@ def test_dot_dict_manager(clear_database_before_test): _ = dict_node.dict['NotExistentKey'] -def test_link_manager(clear_database_before_test): +def test_link_manager(aiida_profile_clean): """Test the LinkManager via .inputs and .outputs from a ProcessNode.""" # I first create a calculation with two inputs and two outputs @@ -138,7 +138,7 @@ def test_link_manager(clear_database_before_test): _ = calc.outputs['NotExistentLabel'] -def test_link_manager_with_nested_namespaces(clear_database_before_test): +def test_link_manager_with_nested_namespaces(aiida_profile_clean): """Test the ``LinkManager`` works with nested namespaces.""" inp1 = orm.Data() inp1.store() @@ -194,7 +194,7 @@ def test_link_manager_with_nested_namespaces(clear_database_before_test): _ = calc.outputs['remote_folder__namespace'] -def test_link_manager_contains(clear_database_before_test): +def test_link_manager_contains(aiida_profile_clean): """Test the ``__contains__`` method for the ``LinkManager``.""" data = orm.Data() data.store() diff --git a/tests/orm/utils/test_serialize.py b/tests/orm/utils/test_serialize.py index 446f5d1dc5..675c7b7793 100644 --- a/tests/orm/utils/test_serialize.py +++ b/tests/orm/utils/test_serialize.py @@ -17,7 +17,7 @@ from aiida.common.links import LinkType from aiida.orm.utils import serialize -pytestmark = pytest.mark.usefixtures('clear_database_before_test') +pytestmark = pytest.mark.usefixtures('aiida_profile_clean') def test_serialize_round_trip(): diff --git a/tests/restapi/conftest.py b/tests/restapi/conftest.py index 7bd6504072..b29efbc37a 100644 --- a/tests/restapi/conftest.py +++ b/tests/restapi/conftest.py @@ -48,18 +48,40 @@ def server_url(): @pytest.fixture -def restrict_sqlalchemy_queuepool(aiida_profile): - """Create special SQLAlchemy engine for use with QueryBuilder - backend-agnostic""" - from aiida.manage.manager import get_manager - - backend_manager = get_manager().get_backend_manager() - backend_manager.reset_backend_environment() - actual_profile = aiida_profile._manager._profile # pylint: disable=protected-access - backend_manager.load_backend_environment(actual_profile, pool_timeout=1, max_overflow=0) +def restrict_db_connections(aiida_profile): # pylint: disable=unused-argument + """Restrict the number of database connections allowed to the PSQL database.""" + from aiida.manage import get_manager + + manager = get_manager() + + # create a new profile with the engine key-word arguments + # pool_timeout: number of seconds to wait before giving up on getting a connection from the pool. 
+ # max_overflow: maximum number of connections that can be opened above the pool_size (whose default is 5) + current_profile = manager.get_profile() + new_profile = current_profile.copy() + new_profile.set_storage( + new_profile.storage_backend, + dict(engine_kwargs={ + 'pool_timeout': 1, + 'max_overflow': 0 + }, **new_profile.storage_config) + ) + # load the new profile and initialise the database connection + manager.unload_profile() + manager.load_profile(new_profile) + backend = manager.get_profile_storage() + # double check that the connection is set with these parameters + session = backend.get_session() + assert session.bind.pool.timeout() == 1 + assert session.bind.pool._max_overflow == 0 # pylint: disable=protected-access + yield + # reset the original profile + manager.unload_profile() + manager.load_profile(current_profile) @pytest.fixture -def populate_restapi_database(clear_database_before_test): +def populate_restapi_database(aiida_profile_clean): """Populates the database with a considerable set of nodes to test the restAPI""" # pylint: disable=unused-argument from aiida import orm diff --git a/tests/restapi/test_identifiers.py b/tests/restapi/test_identifiers.py index cf744fe6b6..181bc86083 100644 --- a/tests/restapi/test_identifiers.py +++ b/tests/restapi/test_identifiers.py @@ -85,7 +85,7 @@ def test_get_filters_errors(): @pytest.mark.parametrize( 'process_class', [orm.CalcFunctionNode, orm.CalcJobNode, orm.WorkFunctionNode, orm.WorkChainNode] ) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_full_type_unregistered(process_class, restapi_server, server_url): """Functionality test for the compatibility with old process_type entries. @@ -174,7 +174,7 @@ def test_full_type_unregistered(process_class, restapi_server, server_url): @pytest.mark.parametrize('node_class', [orm.CalcFunctionNode, orm.Dict]) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_full_type_backwards_compatibility(node_class, restapi_server, server_url): """Functionality test for the compatibility with old process_type entries. diff --git a/tests/restapi/test_routes.py b/tests/restapi/test_routes.py index 5f9d0b4d3d..912ddc968e 100644 --- a/tests/restapi/test_routes.py +++ b/tests/restapi/test_routes.py @@ -139,6 +139,14 @@ def setUpClass(cls): # pylint: disable=too-many-locals, too-many-statements # Prepare typical REST responses cls.process_dummy_data() + @classmethod + def tearDownClass(cls): + # we need to reset the default user here, + # because the REST API's close_thread_connection decorator will have closed its session, + # meaning the `PsqlDosBackend._clear` method will fail + orm.User.objects.reset() + super().tearDownClass() + def get_dummy_data(self): return self._dummy_data diff --git a/tests/restapi/test_threaded_restapi.py b/tests/restapi/test_threaded_restapi.py index cfad9ebdc5..56b7465b9a 100644 --- a/tests/restapi/test_threaded_restapi.py +++ b/tests/restapi/test_threaded_restapi.py @@ -21,9 +21,12 @@ NO_OF_REQUESTS = 100 -@pytest.mark.usefixtures('clear_database_before_test', 'restrict_sqlalchemy_queuepool') +@pytest.mark.usefixtures('aiida_profile_clean', 'restrict_db_connections') def test_run_threaded_server(restapi_server, server_url, aiida_localhost): - """Run AiiDA REST API threaded in a separate thread and perform many sequential requests""" + """Run AiiDA REST API threaded in a separate thread and perform many sequential requests.
+ + This test will fail if database connections are not being properly closed by the end-point calls. + """ server = restapi_server() computer_id = aiida_localhost.uuid @@ -63,7 +66,7 @@ def test_run_threaded_server(restapi_server, server_url, aiida_localhost): @pytest.mark.skip('Is often failing on Python 3.8 and 3.9: see https://github.com/aiidateam/aiida-core/issues/4281') -@pytest.mark.usefixtures('clear_database_before_test', 'restrict_sqlalchemy_queuepool') +@pytest.mark.usefixtures('aiida_profile_clean', 'restrict_db_connections') def test_run_without_close_session(restapi_server, server_url, aiida_localhost, capfd): """Run AiiDA REST API threaded in a separate thread and perform many sequential requests""" from aiida.restapi.api import AiidaApi diff --git a/tests/test_nodes.py b/tests/test_nodes.py index 63950bc3e5..0d7f76ed1e 100644 --- a/tests/test_nodes.py +++ b/tests/test_nodes.py @@ -175,7 +175,7 @@ def test_uuid_type(self): class TestQueryWithAiidaObjects(AiidaTestCase): """ Test if queries work properly also with aiida.orm.Node classes instead of - aiida.backends.djsite.db.models.DbNode objects. + backend model objects. """ def test_with_subclasses(self): @@ -264,7 +264,6 @@ def test_uuid_uniquess(self): """ A uniqueness constraint on the UUID column of the Node model should prevent multiple nodes with identical UUID """ - from django.db import IntegrityError as DjIntegrityError from sqlalchemy.exc import IntegrityError as SqlaIntegrityError a = orm.Data() @@ -272,7 +271,7 @@ b.backend_entity.dbmodel.uuid = a.uuid a.store() - with self.assertRaises((DjIntegrityError, SqlaIntegrityError)): + with self.assertRaises(SqlaIntegrityError): b.store() def test_attribute_mutability(self): diff --git a/tests/tools/archive/migration/test_prov_redesign.py b/tests/tools/archive/migration/test_prov_redesign.py index 2c7ce02744..3526f25ca1 100644 --- a/tests/tools/archive/migration/test_prov_redesign.py +++ b/tests/tools/archive/migration/test_prov_redesign.py @@ -27,7 +27,7 @@ def test_base_data_type_change(tmp_path, aiida_profile): """ Base Data types type string changed Example: Bool: “data.base.Bool.” → “data.bool.Bool.” """ - aiida_profile.reset_db() + aiida_profile.clear_profile() # Test content test_content = ('Hello', 6, -1.2399834e12, False) @@ -58,7 +58,7 @@ def test_base_data_type_change(tmp_path, aiida_profile): create_archive(export_nodes, filename=filename) # Clean the database - aiida_profile.reset_db() + aiida_profile.clear_profile() # Import nodes again import_archive(filename) @@ -89,7 +89,7 @@ def test_node_process_type(aiida_profile, tmp_path): from aiida.engine import run_get_node from tests.utils.processes import AddProcess - aiida_profile.reset_db() + aiida_profile.clear_profile() # Node types node_type = 'process.workflow.WorkflowNode.'
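# Editor's note: every migration test in test_prov_redesign.py follows the same round trip,
# which the hunks show only piecemeal. A minimal sketch of the pattern (the test name and the
# stored node are illustrative, not part of the changeset):

from aiida import orm
from aiida.tools.archive import create_archive, import_archive

def test_round_trip(tmp_path, aiida_profile):
    node = orm.Data().store()                   # populate the profile
    node_uuid = node.uuid                       # remember the UUID before wiping
    filename = tmp_path / 'export.aiida'
    create_archive([node], filename=filename)   # export the node to an archive
    aiida_profile.clear_profile()               # wipe the profile (formerly reset_db())
    import_archive(filename)                    # re-import into the clean profile
    orm.load_node(node_uuid)                    # raises if the round trip lost the node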
@@ -111,7 +111,7 @@ def test_node_process_type(aiida_profile, tmp_path): create_archive([node], filename=filename) # Clean the database and reimport data - aiida_profile.reset_db() + aiida_profile.clear_profile() import_archive(filename) # Retrieve node and check exactly one node is imported @@ -133,7 +133,7 @@ def test_node_process_type(aiida_profile, tmp_path): assert node.process_type == node_process_type -def test_code_type_change(clear_database_before_test, tmp_path, aiida_localhost): +def test_code_type_change(aiida_profile_clean, tmp_path, aiida_localhost): """ Code type string changed Change: “code.Bool.” → “data.code.Code.” """ @@ -154,7 +154,7 @@ def test_code_type_change(clear_database_before_test, tmp_path, aiida_localhost) create_archive([code], filename=filename) # Clean the database and reimport - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename) # Retrieve Code node and make sure exactly 1 is retrieved @@ -197,7 +197,7 @@ def test_group_name_and_type_change(tmp_path, aiida_profile): """ from aiida.orm.nodes.data.upf import upload_upf_family - aiida_profile.reset_db() + aiida_profile.clear_profile() # To be saved groups_label = ['Users', 'UpfData'] @@ -238,7 +238,7 @@ def test_group_name_and_type_change(tmp_path, aiida_profile): create_archive([group_user, group_upf], filename=filename) # Clean the database and reimport - aiida_profile.reset_db() + aiida_profile.clear_profile() import_archive(filename) # Retrieve Groups and make sure exactly 3 are retrieved (including the "import group") diff --git a/tests/tools/archive/migration/test_v05_to_v06.py b/tests/tools/archive/migration/test_v05_to_v06.py index 1aa886ef60..411b57dcca 100644 --- a/tests/tools/archive/migration/test_v05_to_v06.py +++ b/tests/tools/archive/migration/test_v05_to_v06.py @@ -8,7 +8,7 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Test archive file migration from export version 0.5 to 0.6""" -from aiida.backends.general.migrations.calc_state import STATE_MAPPING +from aiida.backends.sqlalchemy.migrations.utils.calc_state import STATE_MAPPING from aiida.tools.archive.implementations.sqlite.migrations.legacy import migrate_v5_to_v6 from aiida.tools.archive.implementations.sqlite.migrations.utils import verify_metadata_version from tests.utils.archives import get_archive_file, read_json_files diff --git a/tests/tools/archive/orm/test_attributes.py b/tests/tools/archive/orm/test_attributes.py index 518df6e575..be5fe2451b 100644 --- a/tests/tools/archive/orm/test_attributes.py +++ b/tests/tools/archive/orm/test_attributes.py @@ -16,7 +16,7 @@ def test_import_of_attributes(tmp_path, aiida_profile): """Check if attributes are properly imported""" - aiida_profile.reset_db() + aiida_profile.clear_profile() # Create Data with attributes data = orm.Data() data.label = 'my_test_data_node' @@ -28,7 +28,7 @@ def test_import_of_attributes(tmp_path, aiida_profile): create_archive([data], filename=export_file) # Clean db - aiida_profile.reset_db() + aiida_profile.clear_profile() import_archive(export_file) builder = orm.QueryBuilder().append(orm.Data, filters={'label': 'my_test_data_node'}) @@ -39,7 +39,7 @@ def test_import_of_attributes(tmp_path, aiida_profile): assert imported_node.get_attribute('c') == 3 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_strip_checkpoints(tmp_path): """Test that `ProcessNode` 
checkpoints are stripped. diff --git a/tests/tools/archive/orm/test_authinfo.py b/tests/tools/archive/orm/test_authinfo.py index 6d2a7b8c60..a2cf7e0114 100644 --- a/tests/tools/archive/orm/test_authinfo.py +++ b/tests/tools/archive/orm/test_authinfo.py @@ -15,7 +15,7 @@ from aiida.tools.archive.abstract import get_format -@pytest.mark.usefixtures('clear_database_before_test', 'aiida_localhost') +@pytest.mark.usefixtures('aiida_profile_clean', 'aiida_localhost') def test_create_all_no_authinfo(tmp_path): """Test archive creation that does not include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -24,7 +24,7 @@ def test_create_all_no_authinfo(tmp_path): assert archive.querybuilder().append(orm.AuthInfo).count() == 0 -@pytest.mark.usefixtures('clear_database_before_test', 'aiida_localhost') +@pytest.mark.usefixtures('aiida_profile_clean', 'aiida_localhost') def test_create_all_with_authinfo(tmp_path): """Test archive creation that does include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -33,7 +33,7 @@ def test_create_all_with_authinfo(tmp_path): assert archive.querybuilder().append(orm.AuthInfo).count() == 1 -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_create_comp_with_authinfo(tmp_path, aiida_localhost): """Test archive creation that does include authinfo.""" filename1 = tmp_path / 'export1.aiida' @@ -42,11 +42,11 @@ def test_create_comp_with_authinfo(tmp_path, aiida_localhost): assert archive.querybuilder().append(orm.AuthInfo).count() == 1 -def test_import_authinfo(tmp_path, clear_database_before_test, aiida_localhost): +def test_import_authinfo(tmp_path, aiida_profile_clean, aiida_localhost): """Test archive import, including authinfo""" filename1 = tmp_path / 'export1.aiida' create_archive([aiida_localhost], filename=filename1, include_authinfos=True) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # create a computer + authinfo, so that the PKs are different than the original ones # (to check that they are correctly translated) computer = orm.Computer( diff --git a/tests/tools/archive/orm/test_calculations.py b/tests/tools/archive/orm/test_calculations.py index 5ab1851f81..789e826bf0 100644 --- a/tests/tools/archive/orm/test_calculations.py +++ b/tests/tools/archive/orm/test_calculations.py @@ -21,7 +21,7 @@ @pytest.mark.requires_rmq def test_calcfunction(tmp_path, aiida_profile): """Test @calcfunction""" - aiida_profile.reset_db() + aiida_profile.clear_profile() @calcfunction def add(a, b): @@ -44,7 +44,7 @@ def max_(**kwargs): # At this point we export the generated data filename1 = tmp_path / 'export1.aiida' create_archive([res], filename=filename1, return_backward=True) - aiida_profile.reset_db() + aiida_profile.clear_profile() import_archive(filename1) # Check that the imported nodes are correctly imported and that the value is preserved for uuid, value in uuids_values: @@ -56,7 +56,7 @@ def max_(**kwargs): def test_workcalculation(tmp_path, aiida_profile): """Test simple master/slave WorkChainNodes""" - aiida_profile.reset_db() + aiida_profile.clear_profile() master = orm.WorkChainNode() slave = orm.WorkChainNode() @@ -79,7 +79,7 @@ def test_workcalculation(tmp_path, aiida_profile): uuids_values = [(v.uuid, v.value) for v in (output_1,)] filename1 = tmp_path / 'export1.aiida' create_archive([output_1], filename=filename1) - aiida_profile.reset_db() + aiida_profile.clear_profile() import_archive(filename1) for uuid, value in uuids_values: diff --git 
a/tests/tools/archive/orm/test_codes.py b/tests/tools/archive/orm/test_codes.py index b2fdd07461..363bc4d1ce 100644 --- a/tests/tools/archive/orm/test_codes.py +++ b/tests/tools/archive/orm/test_codes.py @@ -14,7 +14,7 @@ from tests.tools.archive.utils import get_all_node_links -def test_that_solo_code_is_exported_correctly(tmp_path, clear_database_before_test, aiida_localhost): +def test_that_solo_code_is_exported_correctly(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that when a calculation is exported then the corresponding code is also exported. @@ -31,14 +31,14 @@ def test_that_solo_code_is_exported_correctly(tmp_path, clear_database_before_te export_file = tmp_path / 'export.aiida' create_archive([code], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) assert orm.load_node(code_uuid).label == code_label -def test_input_code(tmp_path, clear_database_before_test, aiida_localhost): +def test_input_code(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that when a calculation is exported then the corresponding code is also exported. It also checks that the links @@ -67,7 +67,7 @@ def test_input_code(tmp_path, clear_database_before_test, aiida_localhost): export_file = tmp_path / 'export.aiida' create_archive([calc], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) @@ -83,7 +83,7 @@ def test_input_code(tmp_path, clear_database_before_test, aiida_localhost): 'the calculation node after import. {} found.'.format(len(import_links)) -def test_solo_code(tmp_path, clear_database_before_test, aiida_localhost): +def test_solo_code(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that when a calculation is exported then the corresponding code is also exported. 
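# Editor's note: the code-export tests in this file reduce to one round trip, sketched here.
# The `remote_computer_exec` constructor argument is an assumption about how the code node is
# built; it does not appear in the hunks themselves.

code = orm.Code(remote_computer_exec=(aiida_localhost, '/bin/true'))
code.label = 'test_code'
code.store()
code_uuid = code.uuid                        # remember the UUID before wiping the profile
create_archive([code], filename=export_file)
aiida_profile_clean.clear_profile()          # empty the profile (formerly reset_db())
import_archive(export_file)
assert orm.load_node(code_uuid).label == 'test_code'  # the code was exported with the archive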
@@ -100,7 +100,7 @@ def test_solo_code(tmp_path, clear_database_before_test, aiida_localhost): export_file = tmp_path / 'export.aiida' create_archive([code], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) diff --git a/tests/tools/archive/orm/test_comments.py b/tests/tools/archive/orm/test_comments.py index a862c6addd..b740abf7d0 100644 --- a/tests/tools/archive/orm/test_comments.py +++ b/tests/tools/archive/orm/test_comments.py @@ -22,7 +22,7 @@ ) -def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): +def test_multiple_imports_for_single_node(tmp_path, aiida_profile_clean): """Test multiple imports for single node with different comments are imported correctly""" user = orm.User.objects.get_default() @@ -48,7 +48,7 @@ def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): create_archive([node], filename=export_file_full) # Clean database and reimport "EXISTING" DB - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file_existing) # Check correct import @@ -90,7 +90,7 @@ def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): assert imported_comment_content in COMMENTS -def test_exclude_comments_flag(tmp_path, clear_database_before_test): +def test_exclude_comments_flag(tmp_path, aiida_profile_clean): """Test comments and associated commenting users are not exported when using `include_comments=False`.""" # Create users, node, and comments user_one = orm.User.objects.get_default() @@ -115,7 +115,7 @@ def test_exclude_comments_flag(tmp_path, clear_database_before_test): create_archive([node], filename=export_file, include_comments=False) # Clean database and reimport exported file - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Get node, users, and comments @@ -133,7 +133,7 @@ def test_exclude_comments_flag(tmp_path, clear_database_before_test): assert str(import_users[0][0]) == users_email[0] -def test_calc_and_data_nodes_with_comments(tmp_path, clear_database_before_test): +def test_calc_and_data_nodes_with_comments(tmp_path, aiida_profile_clean): """ Test comments for CalculatioNode and Data node are correctly ex-/imported """ # Create user, nodes, and comments user = orm.User.objects.get_default() @@ -159,7 +159,7 @@ def test_calc_and_data_nodes_with_comments(tmp_path, clear_database_before_test) create_archive([calc_node, data_node], filename=export_file) # Clean database and reimport exported file - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Get nodes and comments @@ -184,7 +184,7 @@ def test_calc_and_data_nodes_with_comments(tmp_path, clear_database_before_test) assert import_comment_uuid in data_comments_uuid -def test_multiple_user_comments_single_node(tmp_path, clear_database_before_test): +def test_multiple_user_comments_single_node(tmp_path, aiida_profile_clean): """ Test multiple users commenting on a single orm.CalculationNode """ # Create users, node, and comments user_one = orm.User.objects.get_default() @@ -210,7 +210,7 @@ def test_multiple_user_comments_single_node(tmp_path, clear_database_before_test create_archive([node], filename=export_file) # Clean database and reimport exported file - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Get node, users, and comments @@ -265,7 +265,7 @@ def 
test_multiple_user_comments_single_node(tmp_path, clear_database_before_test assert imported_user_two_comment_uuids == set(user_two_comments_uuid) -def test_mtime_of_imported_comments(tmp_path, clear_database_before_test): +def test_mtime_of_imported_comments(tmp_path, aiida_profile_clean): """ Test mtime does not change for imported comments This is related to correct usage of `merge_comments` when importing. @@ -296,7 +296,7 @@ def test_mtime_of_imported_comments(tmp_path, clear_database_before_test): # Export, reset database and reimport export_file = tmp_path / 'export.aiida' create_archive([calc], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Retrieve node and comment @@ -321,7 +321,7 @@ def test_mtime_of_imported_comments(tmp_path, clear_database_before_test): assert import_calc_mtime == calc_mtime -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_import_arg_comment_mode(tmp_path): """ Test the import modes of `merge_comments`. @@ -389,7 +389,7 @@ def test_import_arg_comment_mode(tmp_path): import_archive(export_file, merge_comments='invalid') -def test_reimport_of_comments_for_single_node(tmp_path, clear_database_before_test): +def test_reimport_of_comments_for_single_node(tmp_path, aiida_profile_clean): """ When a node with comments already exist in the DB, and more comments are imported for the same node (same UUID), test that only new comment-entries @@ -475,7 +475,7 @@ def test_reimport_of_comments_for_single_node(tmp_path, clear_database_before_te create_archive([calc], filename=export_file_full) # Clean database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() ## Part II # Reimport "EXISTING" DB @@ -514,7 +514,7 @@ def test_reimport_of_comments_for_single_node(tmp_path, clear_database_before_te create_archive([calc], filename=export_file_new) # Clean database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() ## Part III # Reimport "EXISTING" DB @@ -567,7 +567,7 @@ def test_reimport_of_comments_for_single_node(tmp_path, clear_database_before_te assert comment_content in COMMENTS -def test_import_newest(tmp_path, clear_database_before_test): +def test_import_newest(tmp_path, aiida_profile_clean): """Test `merge_comments='newest'""" user = orm.User.objects.get_default() node = orm.Data().store() @@ -583,7 +583,7 @@ def test_import_newest(tmp_path, clear_database_before_test): export_file_new = tmp_path / 'export_new.aiida' create_archive([node], filename=export_file_new, include_comments=True) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file_old) assert orm.Comment.objects.get(uuid=comment_1_uuid).content == 'Comment old' diff --git a/tests/tools/archive/orm/test_computers.py b/tests/tools/archive/orm/test_computers.py index a28badf0f0..3f1513e03c 100644 --- a/tests/tools/archive/orm/test_computers.py +++ b/tests/tools/archive/orm/test_computers.py @@ -17,7 +17,7 @@ from tests.utils.archives import import_test_archive -def test_same_computer_import(tmp_path, clear_database_before_test, aiida_localhost): +def test_same_computer_import(tmp_path, aiida_profile_clean, aiida_localhost): """ Test that you can import nodes in steps without any problems. In this test we will import a first calculation and then a second one. 
The @@ -60,7 +60,7 @@ def test_same_computer_import(tmp_path, clear_database_before_test, aiida_localh create_archive([calc2], filename=filename2) # Clean the local database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Check that there are no computers builder = orm.QueryBuilder() @@ -113,7 +113,7 @@ def test_same_computer_import(tmp_path, clear_database_before_test, aiida_localh assert ret_labels == set([calc1_label, calc2_label]), 'The labels of the calculations are not correct.' -def test_same_computer_different_name_import(tmp_path, clear_database_before_test, aiida_localhost): +def test_same_computer_different_name_import(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that if the computer is re-imported with a different name to the same database, then the original computer will not be @@ -156,7 +156,7 @@ def test_same_computer_different_name_import(tmp_path, clear_database_before_tes create_archive([calc2], filename=filename2) # Clean the local database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Check that there are no computers builder = orm.QueryBuilder() @@ -194,7 +194,7 @@ def test_same_computer_different_name_import(tmp_path, clear_database_before_tes assert str(builder.first()[0]) == comp1_name, 'The computer name is not correct.' -def test_different_computer_same_name_import(tmp_path, clear_database_before_test, aiida_localhost_factory): +def test_different_computer_same_name_import(tmp_path, aiida_profile_clean, aiida_localhost_factory): """ This test checks that if there is a name collision, the imported computers are renamed accordingly. @@ -216,7 +216,7 @@ def test_different_computer_same_name_import(tmp_path, clear_database_before_tes create_archive([calc1], filename=filename1) # Reset the database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Store a second calculation calc2_label = 'calc2' @@ -232,7 +232,7 @@ def test_different_computer_same_name_import(tmp_path, clear_database_before_tes create_archive([calc2], filename=filename2) # Reset the database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Store a third calculation calc3_label = 'calc3' @@ -248,7 +248,7 @@ def test_different_computer_same_name_import(tmp_path, clear_database_before_tes create_archive([calc3], filename=filename3) # Clean the local database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Check that there are no computers builder = orm.QueryBuilder() @@ -278,7 +278,7 @@ def test_different_computer_same_name_import(tmp_path, clear_database_before_tes assert [calc3_label, DUPLICATE_LABEL_TEMPLATE.format(comp1_name, 1)] in res, 'Calc-Computer combination not found.' -def test_import_of_computer_json_params(tmp_path, clear_database_before_test, aiida_localhost): +def test_import_of_computer_json_params(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that the metadata and transport params are exported and imported correctly in both backends. 
""" @@ -302,7 +302,7 @@ def test_import_of_computer_json_params(tmp_path, clear_database_before_test, ai create_archive([calc1], filename=filename1) # Clean the local database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Import the data import_archive(filename1) @@ -315,9 +315,9 @@ def test_import_of_computer_json_params(tmp_path, clear_database_before_test, ai assert res['comp']['metadata'] == comp1_metadata, 'Not the expected metadata were found' -@pytest.mark.parametrize('backend', ['sqlalchemy', 'django']) -def test_import_of_django_sqla_export_file(clear_database_before_test, aiida_localhost, backend): # pylint: disable=unused-argument - """Check that sqla import manages to import the django archive file correctly""" +@pytest.mark.parametrize('backend', ['django', 'sqlalchemy']) +def test_import_of_django_sqla_export_file(aiida_profile_clean, aiida_localhost, backend): # pylint: disable=unused-argument + """Check that import manages to import the archive file correctly for legacy storage backends.""" archive = f'{backend}.aiida' # Import the needed data diff --git a/tests/tools/archive/orm/test_extras.py b/tests/tools/archive/orm/test_extras.py index 30d5025e8b..90b6db7ac9 100644 --- a/tests/tools/archive/orm/test_extras.py +++ b/tests/tools/archive/orm/test_extras.py @@ -16,7 +16,7 @@ @pytest.fixture(scope='function') -def new_archive(clear_database_before_test, tmp_path): +def new_archive(aiida_profile_clean, tmp_path): """Create a new archive""" data = orm.Data() data.label = 'my_test_data_node' @@ -24,7 +24,7 @@ def new_archive(clear_database_before_test, tmp_path): data.set_extra_many({'b': 2, 'c': 3}) archive_file = tmp_path / 'export.aiida' create_archive([data], filename=archive_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() yield archive_file diff --git a/tests/tools/archive/orm/test_groups.py b/tests/tools/archive/orm/test_groups.py index fec7a3097f..c9d6a02052 100644 --- a/tests/tools/archive/orm/test_groups.py +++ b/tests/tools/archive/orm/test_groups.py @@ -16,7 +16,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_nodes_in_group(tmp_path, clear_database_before_test, aiida_localhost): +def test_nodes_in_group(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that nodes that belong to a specific group are correctly imported and exported. @@ -51,7 +51,7 @@ def test_nodes_in_group(tmp_path, clear_database_before_test, aiida_localhost): filename1 = tmp_path / 'export1.aiida' create_archive([sd1, jc1, gr1], filename=filename1) n_uuids = [sd1.uuid, jc1.uuid] - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename1) # Check that the imported nodes are correctly imported and that @@ -65,7 +65,7 @@ def test_nodes_in_group(tmp_path, clear_database_before_test, aiida_localhost): assert builder.count() == 1, 'The group was not found.' 
-def test_group_export(tmp_path, clear_database_before_test): +def test_group_export(tmp_path, aiida_profile_clean): """Exporting a group includes its extras and nodes.""" # Create a new user new_email = 'newuser@new.n' @@ -89,7 +89,7 @@ def test_group_export(tmp_path, clear_database_before_test): filename = tmp_path / 'export.aiida' create_archive([group], filename=filename) n_uuids = [sd1.uuid] - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename) # Check that the imported nodes are correctly imported and that @@ -105,7 +105,7 @@ def test_group_export(tmp_path, clear_database_before_test): assert imported_group.get_extra('test') == 1, 'Extra missing on imported group' -def test_group_import_existing(tmp_path, clear_database_before_test): +def test_group_import_existing(tmp_path, aiida_profile_clean): """ Testing what happens when I try to import a group that already exists in the database. This should raise an appropriate exception @@ -131,7 +131,7 @@ def test_group_import_existing(tmp_path, clear_database_before_test): # At this point we export the generated data filename = tmp_path / 'export1.aiida' create_archive([group], filename=filename) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Creating a group of the same name group = orm.Group(label='node_group_existing') @@ -155,7 +155,7 @@ def test_group_import_existing(tmp_path, clear_database_before_test): assert builder.count() == 2 -def test_import_to_group(tmp_path, clear_database_before_test): +def test_import_to_group(tmp_path, aiida_profile_clean): """Test `group` parameter Make sure an unstored Group is stored by the import function, forwarding the Group object. Make sure the Group is correctly handled and used for imported nodes. 
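Note: every hunk in these test files applies the same mechanical substitution: the retired `clear_database_before_test` fixture exposed a `reset_db()` method, whereas the replacement `aiida_profile_clean` fixture exposes `clear_profile()`, which wipes the file repository along with the database. As a rough sketch of the intent (the real fixture ships with aiida-core's pytest plugin and may differ in detail), such a fixture can be layered on the session-scoped `aiida_profile` fixture like this:

    import pytest

    @pytest.fixture(scope='function')
    def aiida_profile_clean(aiida_profile):
        """Yield the loaded test profile, wiping its storage (database and file repository) first."""
        aiida_profile.clear_profile()
        yield aiida_profile
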
@@ -168,7 +168,7 @@ def test_import_to_group(tmp_path, clear_database_before_test): # Export Nodes filename = tmp_path / 'export.aiida' create_archive([data1, data2], filename=filename) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Create Group, do not store group_label = 'import_madness' @@ -213,7 +213,7 @@ def test_import_to_group(tmp_path, clear_database_before_test): assert node.uuid in node_uuids -def test_create_group(tmp_path, clear_database_before_test): # pylint: disable=unused-argument +def test_create_group(tmp_path, aiida_profile_clean): # pylint: disable=unused-argument """Test create_group argument""" node = orm.Data().store() filename = tmp_path / 'export.aiida' diff --git a/tests/tools/archive/orm/test_links.py b/tests/tools/archive/orm/test_links.py index 4a8670bf8f..242dea30e7 100644 --- a/tests/tools/archive/orm/test_links.py +++ b/tests/tools/archive/orm/test_links.py @@ -16,7 +16,7 @@ from tests.tools.archive.utils import get_all_node_links -def test_links_to_unknown_nodes(tmp_path, clear_database_before_test): +def test_links_to_unknown_nodes(tmp_path, aiida_profile_clean): """Test importing of nodes that have links to unknown nodes.""" # store a node node = orm.Data() @@ -44,7 +44,7 @@ def test_links_to_unknown_nodes(tmp_path, clear_database_before_test): with ArchiveFormatSqlZip().open(filename, 'r') as archive: assert archive.querybuilder().append(entity_type='link').count() == 1 - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # since the query builder only looks for links between known nodes, # this should not import the erroneous link @@ -54,7 +54,7 @@ def test_links_to_unknown_nodes(tmp_path, clear_database_before_test): assert orm.QueryBuilder().append(entity_type='link').count() == 0 -def test_input_and_create_links(tmp_path, clear_database_before_test): +def test_input_and_create_links(tmp_path, aiida_profile_clean): """ Simple test that will verify that INPUT and CREATE links are properly exported and correctly recreated upon import. @@ -73,7 +73,7 @@ def test_input_and_create_links(tmp_path, clear_database_before_test): export_file = tmp_path.joinpath('export.aiida') create_archive([node_output], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) import_links = get_all_node_links() @@ -215,7 +215,7 @@ def construct_complex_graph(aiida_localhost_factory, export_combination=0, work_ return graph_nodes, export_list[export_combination] -def test_complex_workflow_graph_links(tmp_path, clear_database_before_test, aiida_localhost_factory): +def test_complex_workflow_graph_links(tmp_path, aiida_profile_clean, aiida_localhost_factory): """ This test checks that all the needed links are correctly exported and imported. 
More precisely, it checks that INPUT, CREATE, RETURN and CALL @@ -245,7 +245,7 @@ def test_complex_workflow_graph_links(tmp_path, clear_database_before_test, aiid export_file = tmp_path.joinpath('export.aiida') create_archive(graph_nodes, filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) import_links = get_all_node_links() @@ -256,7 +256,7 @@ def test_complex_workflow_graph_links(tmp_path, clear_database_before_test, aiid assert set(export_set) == set(import_set) -def test_complex_workflow_graph_export_sets(tmp_path, clear_database_before_test, aiida_localhost_factory): +def test_complex_workflow_graph_export_sets(tmp_path, aiida_profile_clean, aiida_localhost_factory): """Test ex-/import of individual nodes in complex graph""" for export_conf in range(0, 9): @@ -267,7 +267,7 @@ def test_complex_workflow_graph_export_sets(tmp_path, clear_database_before_test create_archive([export_node], filename=export_file, overwrite=True) export_node_str = str(export_node) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) @@ -282,7 +282,7 @@ def test_complex_workflow_graph_export_sets(tmp_path, clear_database_before_test str(export_target_uuids.symmetric_difference(imported_node_uuids)) -def test_high_level_workflow_links(tmp_path, clear_database_before_test, aiida_localhost_factory): +def test_high_level_workflow_links(tmp_path, aiida_profile_clean, aiida_localhost_factory): """ This test checks that all the needed links are correctly exported and imported. INPUT_CALC, INPUT_WORK, CALL_CALC, CALL_WORK, CREATE, and RETURN @@ -297,7 +297,7 @@ def test_high_level_workflow_links(tmp_path, clear_database_before_test, aiida_l for calcs in high_level_calc_nodes: for works in high_level_work_nodes: - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() graph_nodes, _ = construct_complex_graph(aiida_localhost_factory, calc_nodes=calcs, work_nodes=works) @@ -325,7 +325,7 @@ def test_high_level_workflow_links(tmp_path, clear_database_before_test, aiida_l export_file = tmp_path.joinpath('export.aiida') create_archive(graph_nodes, filename=export_file, overwrite=True) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) import_links = get_all_node_links() @@ -425,7 +425,7 @@ def link_flags_export_helper(name, all_nodes, tmp_path, nodes_to_export, flags, return ret -def test_link_flags(tmp_path, clear_database_before_test, aiida_localhost_factory): +def test_link_flags(tmp_path, aiida_profile_clean, aiida_localhost_factory): """Verify all link follow flags are working as intended. 
Graph (from ``construct_complex_graph()``):: @@ -533,13 +533,13 @@ def test_link_flags(tmp_path, clear_database_before_test, aiida_localhost_factor ) ) - link_flags_import_helper(input_links_forward, clear_database_before_test.reset_db) - link_flags_import_helper(create_return_links_backward, clear_database_before_test.reset_db) - link_flags_import_helper(call_links_backward_calc1, clear_database_before_test.reset_db) - link_flags_import_helper(call_links_backward_work2, clear_database_before_test.reset_db) + link_flags_import_helper(input_links_forward, aiida_profile_clean.clear_profile) + link_flags_import_helper(create_return_links_backward, aiida_profile_clean.clear_profile) + link_flags_import_helper(call_links_backward_calc1, aiida_profile_clean.clear_profile) + link_flags_import_helper(call_links_backward_work2, aiida_profile_clean.clear_profile) -def test_double_return_links_for_workflows(tmp_path, clear_database_before_test): +def test_double_return_links_for_workflows(tmp_path, aiida_profile_clean): """ This test checks that double return links to a node can be exported and imported without problems, @@ -565,7 +565,7 @@ def test_double_return_links_for_workflows(tmp_path, clear_database_before_test) export_file = tmp_path.joinpath('export.aiida') create_archive([data_out, work1, work2, data_in], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) @@ -580,7 +580,7 @@ def test_double_return_links_for_workflows(tmp_path, clear_database_before_test) assert len(links_in_db) == links_count # After import -def test_multiple_post_return_links(tmp_path, clear_database_before_test): # pylint: disable=too-many-locals +def test_multiple_post_return_links(tmp_path, aiida_profile_clean): # pylint: disable=too-many-locals """Check extra RETURN links can be added to existing Nodes, when label is not unique""" data = orm.Int(1).store() calc = orm.CalculationNode().store() @@ -604,7 +604,7 @@ def test_multiple_post_return_links(tmp_path, clear_database_before_test): # py create_archive([data], filename=data_provenance, return_backward=False) create_archive([data], filename=all_provenance, return_backward=True) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # import data provenance import_archive(data_provenance) diff --git a/tests/tools/archive/orm/test_logs.py b/tests/tools/archive/orm/test_logs.py index 08ffc0603f..07f548094d 100644 --- a/tests/tools/archive/orm/test_logs.py +++ b/tests/tools/archive/orm/test_logs.py @@ -13,7 +13,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_critical_log_msg_and_metadata(tmp_path, clear_database_before_test): +def test_critical_log_msg_and_metadata(tmp_path, aiida_profile_clean): """ Testing logging of critical message """ message = 'Testing logging of critical failure' calc = orm.CalculationNode() @@ -34,7 +34,7 @@ def test_critical_log_msg_and_metadata(tmp_path, clear_database_before_test): export_file = tmp_path.joinpath('export.aiida') create_archive([calc], filename=export_file) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) @@ -46,7 +46,7 @@ def test_critical_log_msg_and_metadata(tmp_path, clear_database_before_test): assert logs[0].metadata == log_metadata -def test_exclude_logs_flag(tmp_path, clear_database_before_test): +def test_exclude_logs_flag(tmp_path, aiida_profile_clean): """Test that the `include_logs` argument for `export` works.""" log_msg = 'Testing logging of 
critical failure' @@ -66,7 +66,7 @@ def test_exclude_logs_flag(tmp_path, clear_database_before_test): create_archive([calc], filename=export_file, include_logs=False) # Clean database and reimport exported data - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Finding all the log messages @@ -81,7 +81,7 @@ def test_exclude_logs_flag(tmp_path, clear_database_before_test): assert str(import_calcs[0][0]) == calc_uuid -def test_export_of_imported_logs(tmp_path, clear_database_before_test): +def test_export_of_imported_logs(tmp_path, aiida_profile_clean): """Test export of imported Log""" log_msg = 'Testing export of imported log' @@ -103,7 +103,7 @@ def test_export_of_imported_logs(tmp_path, clear_database_before_test): create_archive([calc], filename=export_file) # Clean database and reimport exported data - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file) # Finding all the log messages @@ -124,7 +124,7 @@ def test_export_of_imported_logs(tmp_path, clear_database_before_test): create_archive([calc], filename=re_export_file) # Clean database and reimport exported data - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(re_export_file) # Finding all the log messages @@ -140,7 +140,7 @@ def test_export_of_imported_logs(tmp_path, clear_database_before_test): assert str(import_logs[0][0]) == log_uuid -def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): +def test_multiple_imports_for_single_node(tmp_path, aiida_profile_clean): """Test multiple imports for single node with different logs are imported correctly""" log_msgs = ['Life is like riding a bicycle.', 'To keep your balance,', 'you must keep moving.'] @@ -167,7 +167,7 @@ def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): create_archive([node], filename=export_file_full) # Clean database and reimport "EXISTING" DB - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(export_file_existing) # Check correct import @@ -208,7 +208,7 @@ def test_multiple_imports_for_single_node(tmp_path, clear_database_before_test): assert imported_log_content in log_msgs -def test_reimport_of_logs_for_single_node(tmp_path, clear_database_before_test): +def test_reimport_of_logs_for_single_node(tmp_path, aiida_profile_clean): """ When a node with logs already exists in the DB, and more logs are imported for the same node (same UUID), test that only new log-entries are added. 
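Note: the archive tests in this file and its siblings all follow the same round trip: build some ORM data, export it with `create_archive`, wipe the profile, then pull it back in with `import_archive`. A condensed, illustrative skeleton of that pattern (the test name and node contents here are invented for the sketch):

    from aiida import orm
    from aiida.tools.archive import create_archive, import_archive

    def test_roundtrip(tmp_path, aiida_profile_clean):
        node = orm.Data().store()
        node_uuid = node.uuid
        filename = tmp_path / 'export.aiida'
        create_archive([node], filename=filename)
        # Wipe the profile storage; the archive file on disk is unaffected.
        aiida_profile_clean.clear_profile()
        import_archive(filename)
        # Node UUIDs are preserved across the export/import cycle.
        assert orm.load_node(node_uuid).uuid == node_uuid
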
@@ -291,7 +291,7 @@ def test_reimport_of_logs_for_single_node(tmp_path, clear_database_before_test): create_archive([calc], filename=export_file_full) # Clean database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() ## Part II # Reimport "EXISTING" DB @@ -329,7 +329,7 @@ def test_reimport_of_logs_for_single_node(tmp_path, clear_database_before_test): create_archive([calc], filename=export_file_new) # Clean database - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() ## Part III # Reimport "EXISTING" DB diff --git a/tests/tools/archive/orm/test_users.py b/tests/tools/archive/orm/test_users.py index c3ee27569b..dc6b9e3953 100644 --- a/tests/tools/archive/orm/test_users.py +++ b/tests/tools/archive/orm/test_users.py @@ -12,13 +12,13 @@ from aiida.tools.archive import create_archive, import_archive -def test_nodes_belonging_to_different_users(tmp_path, clear_database_before_test, aiida_localhost): +def test_nodes_belonging_to_different_users(tmp_path, aiida_profile_clean, aiida_localhost): """ This test checks that nodes belonging to different users are correctly exported & imported. """ from aiida.common.links import LinkType - from aiida.manage.manager import get_manager + from aiida.manage import get_manager manager = get_manager() @@ -67,7 +67,7 @@ def test_nodes_belonging_to_different_users(tmp_path, clear_database_before_test filename = tmp_path.joinpath('export.aiida') create_archive([sd3], filename=filename) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename) # Check that the imported nodes are correctly imported and that @@ -79,7 +79,7 @@ def test_nodes_belonging_to_different_users(tmp_path, clear_database_before_test assert orm.load_node(uuid).user.email == manager.get_profile().default_user_email -def test_non_default_user_nodes(tmp_path, clear_database_before_test, aiida_localhost_factory): # pylint: disable=too-many-statements +def test_non_default_user_nodes(tmp_path, aiida_profile_clean, aiida_localhost_factory): # pylint: disable=too-many-statements """ This test checks that nodes belonging to user A (which is not the default user) can be correctly exported, imported, enriched with nodes @@ -88,7 +88,7 @@ def test_non_default_user_nodes(tmp_path, clear_database_before_test, aiida_loca users. 
""" from aiida.common.links import LinkType - from aiida.manage.manager import get_manager + from aiida.manage import get_manager manager = get_manager() @@ -123,7 +123,7 @@ def test_non_default_user_nodes(tmp_path, clear_database_before_test, aiida_loca filename1 = tmp_path.joinpath('export1.aiidaz') create_archive([sd2], filename=filename1) uuids1 = [sd1.uuid, jc1.uuid, sd2.uuid] - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename1) # Check that the imported nodes are correctly imported and that @@ -154,7 +154,7 @@ def test_non_default_user_nodes(tmp_path, clear_database_before_test, aiida_loca filename2 = tmp_path.joinpath('export2.aiida') create_archive([sd3], filename=filename2) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename2) # Check that the imported nodes are correctly imported and that diff --git a/tests/tools/archive/test_complex.py b/tests/tools/archive/test_complex.py index cad77b3882..4b882e87c4 100644 --- a/tests/tools/archive/test_complex.py +++ b/tests/tools/archive/test_complex.py @@ -22,7 +22,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_complex_graph_import_export(clear_database_before_test, tmp_path, aiida_localhost): +def test_complex_graph_import_export(aiida_profile_clean, tmp_path, aiida_localhost): """ This test checks that a small and bit complex graph can be correctly exported and imported. @@ -81,7 +81,7 @@ def test_complex_graph_import_export(clear_database_before_test, tmp_path, aiida filename = tmp_path / 'export.aiida' create_archive([fd1], filename=filename) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename) @@ -103,7 +103,7 @@ def test_reexport(aiida_profile, tmp_path): |___| |___| |___| """ - aiida_profile.reset_db() + aiida_profile.clear_profile() # Creating a folder for the archive files chars = string.ascii_uppercase + string.digits @@ -156,7 +156,7 @@ def test_reexport(aiida_profile, tmp_path): # this also checks if group memberships are preserved! create_archive([group] + list(group.nodes), filename=filename) # cleaning the DB! 
- aiida_profile.reset_db() + aiida_profile.clear_profile() # reimporting the data from the file import_archive(filename) # creating the hash from db content diff --git a/tests/tools/archive/test_repository.py b/tests/tools/archive/test_repository.py index ae18c263b5..a3501226ed 100644 --- a/tests/tools/archive/test_repository.py +++ b/tests/tools/archive/test_repository.py @@ -17,13 +17,9 @@ from aiida.tools.archive import create_archive, import_archive -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_export_repository(aiida_profile, tmp_path): """Test exporting a node with files in the repository.""" - from aiida.manage.manager import get_manager - - repository = get_manager().get_backend().get_repository() - node = orm.Data() node.put_object_from_filelike(io.BytesIO(b'file_a'), 'file_a') node.put_object_from_filelike(io.BytesIO(b'file_b'), 'relative/file_b') @@ -34,9 +30,7 @@ def test_export_repository(aiida_profile, tmp_path): filepath = os.path.join(tmp_path / 'export.aiida') create_archive([node], filename=filepath) - aiida_profile.reset_db() - repository.erase() - repository.initialise() + aiida_profile.clear_profile() import_archive(filepath) loaded = orm.load_node(uuid=node_uuid) diff --git a/tests/tools/archive/test_simple.py b/tests/tools/archive/test_simple.py index 9962fd3cdd..0f1002ee6e 100644 --- a/tests/tools/archive/test_simple.py +++ b/tests/tools/archive/test_simple.py @@ -19,7 +19,7 @@ @pytest.mark.parametrize('entities', ['all', 'specific']) -def test_base_data_nodes(clear_database_before_test, tmp_path, entities): +def test_base_data_nodes(aiida_profile_clean, tmp_path, entities): """Test ex-/import of Base Data nodes""" # producing values for each base type values = ('Hello', 6, -1.2399834e12, False) @@ -35,7 +35,7 @@ def test_base_data_nodes(clear_database_before_test, tmp_path, entities): else: create_archive(nodes, filename=filename) # cleaning: - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() # Importing back the data: import_archive(filename) # Checking whether values are preserved: @@ -43,7 +43,7 @@ def test_base_data_nodes(clear_database_before_test, tmp_path, entities): assert orm.load_node(uuid).value == refval -def test_calc_of_structuredata(clear_database_before_test, tmp_path, aiida_localhost): +def test_calc_of_structuredata(aiida_profile_clean, tmp_path, aiida_localhost): """Simple ex-/import of CalcJobNode with input StructureData""" struct = orm.StructureData() struct.store() @@ -69,7 +69,7 @@ def test_calc_of_structuredata(clear_database_before_test, tmp_path, aiida_local create_archive([calc], filename=filename) - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() import_archive(filename) for uuid, value in attrs.items(): @@ -78,7 +78,7 @@ def test_calc_of_structuredata(clear_database_before_test, tmp_path, aiida_local assert value[k] == node.get_attribute(k) -def test_check_for_export_format_version(clear_database_before_test, tmp_path): +def test_check_for_export_format_version(aiida_profile_clean, tmp_path): """Test the check for the export format version.""" # first create an archive struct = orm.StructureData() @@ -101,12 +101,12 @@ def test_check_for_export_format_version(clear_database_before_test, tmp_path): (outpath / subpath.at).write_bytes(subpath.read_bytes()) # then try to import it - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() with 
pytest.raises(exceptions.IncompatibleArchiveVersionError): import_archive(filename2) -@pytest.mark.usefixtures('clear_database_before_test') +@pytest.mark.usefixtures('aiida_profile_clean') def test_control_of_licenses(tmp_path): """Test control of licenses.""" struct = orm.StructureData() diff --git a/tests/tools/archive/test_specific_import.py b/tests/tools/archive/test_specific_import.py index 877f22c337..732ecc78fd 100644 --- a/tests/tools/archive/test_specific_import.py +++ b/tests/tools/archive/test_specific_import.py @@ -14,7 +14,7 @@ from aiida.tools.archive import create_archive, import_archive -def test_simple_import(clear_database_before_test, tmp_path): +def test_simple_import(aiida_profile_clean, tmp_path): """ This is a very simple test which checks that an archive file with nodes that are not associated to a computer is imported correctly. In Django @@ -50,7 +50,7 @@ def test_simple_import(clear_database_before_test, tmp_path): assert orm.QueryBuilder().append(orm.Node).count() == len(nodes) # Clean the database and verify there are no nodes left - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() assert orm.QueryBuilder().append(orm.Node).count() == 0 # After importing we should have the original number of nodes again @@ -58,7 +58,7 @@ def test_simple_import(clear_database_before_test, tmp_path): assert orm.QueryBuilder().append(orm.Node).count() == len(nodes) -def test_cycle_structure_data(clear_database_before_test, aiida_localhost, tmp_path): +def test_cycle_structure_data(aiida_profile_clean, aiida_localhost, tmp_path): """ Create an export with some orm.CalculationNode and Data nodes and import it after having cleaned the database. Verify that the nodes and their attributes are restored @@ -111,7 +111,7 @@ def test_cycle_structure_data(clear_database_before_test, aiida_localhost, tmp_p assert orm.QueryBuilder().append(orm.Node).count() == len(nodes) # Clean the database and verify there are no nodes left - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() assert orm.QueryBuilder().append(orm.Node).count() == 0 # After importing we should have the original number of nodes again @@ -151,7 +151,7 @@ def test_cycle_structure_data(clear_database_before_test, aiida_localhost, tmp_p assert len(builder.all()) > 0 -def test_import_checkpoints(clear_database_before_test, tmp_path): +def test_import_checkpoints(aiida_profile_clean, tmp_path): """Check that process node checkpoints are stripped when importing. 
The process node checkpoints need to be stripped because they @@ -172,7 +172,7 @@ def test_import_checkpoints(clear_database_before_test, tmp_path): assert orm.QueryBuilder().append(orm.Node).count() == len(nodes) # Clean the database and verify there are no nodes left - clear_database_before_test.reset_db() + aiida_profile_clean.clear_profile() assert orm.QueryBuilder().append(orm.Node).count() == 0 import_archive(archive_path) diff --git a/tests/tools/groups/test_paths.py b/tests/tools/groups/test_paths.py index a864a86cce..5068d8c097 100644 --- a/tests/tools/groups/test_paths.py +++ b/tests/tools/groups/test_paths.py @@ -16,7 +16,7 @@ @pytest.fixture -def setup_groups(clear_database_before_test): +def setup_groups(aiida_profile_clean): """Setup some groups for testing.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']: group, _ = orm.Group.objects.get_or_create(label) @@ -117,7 +117,7 @@ def test_walk(setup_groups): @pytest.mark.filterwarnings('ignore::UserWarning') -def test_walk_with_invalid_path(clear_database_before_test): +def test_walk_with_invalid_path(aiida_profile_clean): """Test the ``GroupPath.walk`` method with invalid paths.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f', 'bad//group', 'bad/other']: orm.Group.objects.get_or_create(label) @@ -126,7 +126,7 @@ def test_walk_with_invalid_path(clear_database_before_test): assert [c.path for c in sorted(group_path.walk())] == expected -def test_walk_nodes(clear_database_before_test): +def test_walk_nodes(aiida_profile_clean): """Test the ``GroupPath.walk_nodes()`` function.""" group, _ = orm.Group.objects.get_or_create('a') node = orm.Data() @@ -137,7 +137,7 @@ def test_walk_nodes(clear_database_before_test): assert [(r.group_path.path, r.node.attributes) for r in group_path.walk_nodes()] == [('a', {'i': 1, 'j': 2})] -def test_cls(clear_database_before_test): +def test_cls(aiida_profile_clean): """Test that only instances of `cls` or its subclasses are matched by ``GroupPath``.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g']: orm.Group.objects.get_or_create(label) @@ -150,7 +150,7 @@ def test_cls(clear_database_before_test): assert GroupPath('a/b/c') != GroupPath('a/b/c', cls=orm.UpfFamily) -def test_attr(clear_database_before_test): +def test_attr(aiida_profile_clean): """Test ``GroupAttr``.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f', 'bad space', 'bad@char', '_badstart']: orm.Group.objects.get_or_create(label) @@ -163,7 +163,7 @@ def test_attr(clear_database_before_test): group_path.browse.a.c.x # pylint: disable=pointless-statement -def test_cls_label_clashes(clear_database_before_test): +def test_cls_label_clashes(aiida_profile_clean): """Test behaviour when multiple group classes have the same label.""" group_01, _ = orm.Group.objects.get_or_create('a') node_01 = orm.Data().store() diff --git a/tests/utils/configuration.py b/tests/utils/configuration.py index 5fb7666b7b..5d81daf839 100644 --- a/tests/utils/configuration.py +++ b/tests/utils/configuration.py @@ -27,7 +27,7 @@ def create_mock_profile(name, repository_dirpath=None, **kwargs): profile_dictionary = { 'default_user_email': kwargs.pop('default_user_email', 'dummy@localhost'), 'storage': { - 'backend': kwargs.pop('storage_backend', 'django'), + 'backend': kwargs.pop('storage_backend', 'psql_dos'), 'config': { 'database_engine': kwargs.pop('database_engine', 'postgresql_psycopg2'), 'database_hostname': kwargs.pop('database_hostname', DEFAULT_DBINFO['host']), diff --git a/tests/workflows/arithmetic/test_add_multiply.py 
b/tests/workflows/arithmetic/test_add_multiply.py index 580282a910..515ac6baa4 100644 --- a/tests/workflows/arithmetic/test_add_multiply.py +++ b/tests/workflows/arithmetic/test_add_multiply.py @@ -22,7 +22,7 @@ def test_factory(): @pytest.mark.requires_rmq -@pytest.mark.usefixtures('clear_database_before_test', 'temporary_event_loop') +@pytest.mark.usefixtures('aiida_profile_clean', 'temporary_event_loop') def test_run(): """Test running the work function.""" x = Int(1) diff --git a/utils/make_all.py b/utils/make_all.py index 5fecc431e9..77004cd64b 100644 --- a/utils/make_all.py +++ b/utils/make_all.py @@ -159,7 +159,7 @@ def write_inits(folder_path: str, all_dict: dict, skip_children: Dict[str, List[ # skipped since we don't want to expose the implementation 'orm': ['implementation'], # skipped since both implementations share class/function names - 'orm/implementation': ['django', 'sqlalchemy', 'sql'], + 'orm/implementation': ['sqlalchemy', 'sql'], # skip all since the module requires extra requirements 'restapi': ['*'], # keep at aiida.tools.archive level diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index 6b8d9db572..28ccb17fc2 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -100,6 +100,10 @@ def validate_verdi_documentation(): """Auto-generate the documentation for `verdi` through `click`.""" from click import Context + from aiida.manage.configuration import load_documentation_profile + + load_documentation_profile() + from aiida.cmdline.commands.cmd_verdi import verdi width = 90 # The maximum width of the formatted help strings in characters
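Note: the `validate_consistency.py` change mirrors how the documentation build works: `load_documentation_profile()` installs a dummy, in-memory configuration and profile, so that importing the `verdi` command tree (whose option defaults are resolved against the loaded profile) also works on machines without a real AiiDA configuration. Roughly, and glossing over the script's actual traversal of subcommands, the generation step amounts to the following sketch:

    from click import Context

    from aiida.manage.configuration import load_documentation_profile

    load_documentation_profile()  # must run before importing the command tree

    from aiida.cmdline.commands.cmd_verdi import verdi

    with Context(verdi, info_name='verdi', max_content_width=90) as ctx:
        print(verdi.get_help(ctx))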