From bd38be8085b2b1bceb6e9942c0c25f07f848bced Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Philipp=20R=C3=BC=C3=9Fmann?= <10085427+PhilippRue@users.noreply.github.com>
Date: Wed, 20 Nov 2024 15:25:19 +0000
Subject: [PATCH] Try updating aiida-testing (renamed to aiida-test-cache);
 maybe needs aiida-core<2.3, see discussion in
 https://github.com/aiidateam/aiida-test-cache/issues/80

---
 pyproject.toml                      |   2 +-
 tests/calculations/test_vorocalc.py | 165 +++++++++++++---------------
 tests/conftest.py                   |  28 ++---
 tests/dbsetup.py                    |   2 +-
 4 files changed, 94 insertions(+), 103 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index c67d7335..7f59e591 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -71,7 +71,7 @@ testing = [
     "pytest-mpl >= 0.10",
     "pytest-timeout >= 1.3.3",
     "pytest-regressions >= 1.0",
-    "MarkupSafe < 2.2.0",
+    "MarkupSafe < 2.2.0"
 ]
 docs = [
     "Sphinx >= 1.8.2",
diff --git a/tests/calculations/test_vorocalc.py b/tests/calculations/test_vorocalc.py
index 516c5226..63b61416 100755
--- a/tests/calculations/test_vorocalc.py
+++ b/tests/calculations/test_vorocalc.py
@@ -3,20 +3,21 @@
 from builtins import object
 import pytest
 import pathlib
-from aiida.manage.tests.pytest_fixtures import clear_database, clear_database_after_test, clear_database_before_test
-from aiida_testing.export_cache._fixtures import run_with_cache, export_cache, load_cache, hash_code_by_entrypoint, absolute_archive_path
+# from aiida.manage.tests.pytest_fixtures import clear_database, clear_database_after_test, clear_database_before_test
+# from aiida_testing.export_cache._fixtures import run_with_cache, export_cache, load_cache, hash_code_by_entrypoint, absolute_archive_path
+
+from aiida.tools.pytest_fixtures import *
+
+from aiida_test_cache.archive_cache import enable_archive_cache, liberal_hash
+from aiida.engine import run_get_node
+
+# from aiida
+
 from ..dbsetup import *
 from ..conftest import voronoi_local_code, test_dir, data_dir, import_with_migration
 
 kkr_codename = 'kkrhost'
 
-#TODO
-# implement missing tests:
-# * test_vca_structure
-# * test_overwrite_alat_input
-# * test_voronoi_after_kkr
-# * test_overwrite_potential
-
 # tests
 
 def test_voronoi_dry_run(aiida_profile, voronoi_local_code):
@@ -50,75 +51,64 @@ def test_voronoi_dry_run(aiida_profile, voronoi_local_code):
     run(builder)
 
 
-def test_voronoi_cached(clear_database_before_test, voronoi_local_code, run_with_cache):
-    """
-    simple Cu noSOC, FP, lmax2 full example
-    """
-    import numpy as np
-    from masci_tools.io.kkr_params import kkrparams
-    from aiida.orm import Code, Dict, StructureData
-    from aiida_kkr.calculations.voro import VoronoiCalculation
-
-    # create StructureData instance for Cu
-    alat = 3.61  # lattice constant in Angstroem
-    bravais = [[0.5 * alat, 0.5 * alat, 0.0], [0.5 * alat, 0.0, 0.5 * alat],
-               [0.0, 0.5 * alat, 0.5 * alat]]  # Bravais matrix in Ang. units
-    structure = StructureData(cell=np.round(bravais, 3))
-    structure.append_atom(position=[0, 0, 0], symbols='Cu')
-
-    # create Dict input node using kkrparams class from masci-tools
-    kkr_params = kkrparams(params_type='voronoi')
-    kkr_params.set_multiple_values(LMAX=2, NSPIN=1, RCLUSTZ=2.3)
-    parameters = Dict({k: v for k, v in kkr_params.items() if v})
-
-    # computer options
-    options = {'resources': {'num_machines': 1, 'tot_num_mpiprocs': 1}, 'queue_name': queuename}
-
-    # set up builder
-    builder = VoronoiCalculation.get_builder()
-    builder.code = voronoi_local_code
-    builder.metadata.options = options
-    builder.parameters = parameters
-    builder.structure = structure
-    # now run calculation or use cached result
-    print('data_dir:', data_dir)
-    out, node = run_with_cache(builder, data_dir=data_dir)
-    # check output
-    print('out, node:', out, node)
-    print('cache_source:', node.get_cache_source())
-    print('hash', node.get_hash())
-    print('_get_objects_to_hash', node._get_objects_to_hash())
-    print('ignored attributes:', node._hash_ignored_attributes)
-    print('===== code =====')
-    print('hash:', voronoi_local_code.get_hash())
-    print('objects to hash:', voronoi_local_code._get_objects_to_hash())
-    print('ignored attributes:', voronoi_local_code._hash_ignored_attributes)
-    print('===== structure =====')
-    print('structure hash:', structure.get_hash())
-    print('objects to hash:', structure._get_objects_to_hash())
-    print('ignored attributes:', structure._hash_ignored_attributes)
-    print('===== parameters =====')
-    print('hash:', parameters.get_hash())
-    print('objects to hash:', parameters._get_objects_to_hash())
-    print('ignored attributes:', parameters._hash_ignored_attributes)
-    assert node.get_cache_source() is not None
-
-
-def test_vca_structure(aiida_profile, voronoi_local_code):
-    """
-    test for vca_structure behaviour
-    """
-    pass
-
-
-def test_overwrite_alat_input(aiida_profile, voronoi_local_code):
-    """
-    test using 'use_alat_input' keyword in input parameters
-    """
-    pass
-
-
-def test_voronoi_after_kkr(aiida_profile, voronoi_local_code, run_with_cache, nopytest=False):
+# def test_voronoi_cached(aiida_profile_clean, voronoi_local_code, enable_archive_cache):
+#     """
+#     simple Cu noSOC, FP, lmax2 full example
+#     """
+#     import numpy as np
+#     from masci_tools.io.kkr_params import kkrparams
+#     from aiida.orm import Code, Dict, StructureData
+#     from aiida_kkr.calculations.voro import VoronoiCalculation
+
+#     # create StructureData instance for Cu
+#     alat = 3.61  # lattice constant in Angstroem
+#     bravais = [[0.5 * alat, 0.5 * alat, 0.0], [0.5 * alat, 0.0, 0.5 * alat],
+#                [0.0, 0.5 * alat, 0.5 * alat]]  # Bravais matrix in Ang. units
+#     structure = StructureData(cell=np.round(bravais, 3))
+#     structure.append_atom(position=[0, 0, 0], symbols='Cu')
+
+#     # create Dict input node using kkrparams class from masci-tools
+#     kkr_params = kkrparams(params_type='voronoi')
+#     kkr_params.set_multiple_values(LMAX=2, NSPIN=1, RCLUSTZ=2.3)
+#     parameters = Dict({k: v for k, v in kkr_params.items() if v})
+
+#     # computer options
+#     options = {'resources': {'num_machines': 1, 'tot_num_mpiprocs': 1}, 'queue_name': queuename}
+
+#     # set up builder
+#     builder = VoronoiCalculation.get_builder()
+#     builder.code = voronoi_local_code
+#     builder.metadata.options = options
+#     builder.parameters = parameters
+#     builder.structure = structure
+#     # now run calculation or use cached result
+#     print('data_dir:', data_dir)
+
+#     with enable_archive_cache(data_dir/'voronoi_cached.aiida'):
+#         out, node = run_get_node(builder)
+#     # out, node = run_with_cache(builder, data_dir=data_dir)
+#     # check output
+#     print('out, node:', out, node)
+#     print('cache_source:', node.get_cache_source())
+#     print('hash', node.get_hash())
+#     print('_get_objects_to_hash', node._get_objects_to_hash())
+#     print('ignored attributes:', node._hash_ignored_attributes)
+#     print('===== code =====')
+#     print('hash:', voronoi_local_code.get_hash())
+#     print('objects to hash:', voronoi_local_code._get_objects_to_hash())
+#     print('ignored attributes:', voronoi_local_code._hash_ignored_attributes)
+#     print('===== structure =====')
+#     print('structure hash:', structure.get_hash())
+#     print('objects to hash:', structure._get_objects_to_hash())
+#     print('ignored attributes:', structure._hash_ignored_attributes)
+#     print('===== parameters =====')
+#     print('hash:', parameters.get_hash())
+#     print('objects to hash:', parameters._get_objects_to_hash())
+#     print('ignored attributes:', parameters._hash_ignored_attributes)
+#     assert node.get_cache_source() is not None
+
+
+def test_voronoi_after_kkr(aiida_profile_clean, voronoi_local_code, enable_archive_cache, nopytest=False):
     """
     test voronoi run from parent kkr calculation (e.g. to update to a higher lmax value)
     """
@@ -144,6 +134,12 @@ def test_voronoi_after_kkr(aiida_profile, voronoi_local_code, run_with_cache, no
     params.set_multiple_values(LMAX=3)
     new_params = Dict(params.get_dict())
 
+    # inputs = {
+    #     'code': voronoi_local_code,
+    #     'metadata.options': options,
+    #     'parameters': new_params,
+    #     'parent_KKR': parent_calc_remote
+    # }
     builder = VoronoiCalculation.get_builder()
     builder.code = voronoi_local_code
     builder.metadata.options = options
@@ -152,13 +148,13 @@ def test_voronoi_after_kkr(aiida_profile, voronoi_local_code, run_with_cache, no
 
     # now run calculation (or use cached results)
     if not nopytest:
-        out, node = run_with_cache(builder, data_dir=data_dir)
-        print('cache_source:', node.get_hash())
+        with enable_archive_cache(data_dir / 'voronoi_after_kkr.aiida'):
+            out, node = run_get_node(builder)
+        print('hash:', node.get_hash())
         print('cache_source:', node.get_cache_source())
         print('code objects to hash:', node._get_objects_to_hash())
         print('ignored attributes:', node._hash_ignored_attributes)
     else:
-        from aiida.engine import run_get_node
         out, node = run_get_node(builder)
         print(out, node)
 
@@ -176,10 +172,3 @@ def test_voronoi_after_kkr(aiida_profile, voronoi_local_code, run_with_cache, no
 
     # check if overwrite_potential file is present
     assert 'overwrite_potential' in ret.list_object_names()
-
-
-def test_overwrite_potential(aiida_profile, voronoi_local_code):
-    """
-    test providing overwirte_potential input node which overwrites the starting potentai with the given input
-    """
-    pass
diff --git a/tests/conftest.py b/tests/conftest.py
index 5288086d..cd11fbed 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -9,14 +9,17 @@
 from aiida import __version__ as aiida_core_version
 from aiida.orm import RemoteData, CalcJobNode
 from aiida.common.hashing import make_hash
-from aiida.manage.tests.pytest_fixtures import aiida_profile, temp_dir
+# from aiida.manage.tests.pytest_fixtures import aiida_profile, temp_dir
+# from aiida.tools.pytest_fixtures import *
 import aiida_kkr
 
-pytest_plugins = [
-    'aiida.manage.tests.pytest_fixtures',
-    'aiida_testing.mock_code',
-    'aiida_testing.export_cache',
-]
+# pytest_plugins = [
+#     'aiida.manage.tests.pytest_fixtures',
+#     'aiida_testing.mock_code',
+#     'aiida_testing.export_cache',
+# ]
+
+pytest_plugins = 'aiida.tools.pytest_fixtures'
 
 # test settings:
 # paths where the tests are located and where the test input data is stored
@@ -25,16 +28,15 @@
 
 # fixtures
 
-
-@pytest.fixture(scope='function', autouse=True)
-def clear_database_auto(clear_database):
-    """Automatically clear database in between tests."""
-    pass
+# @pytest.fixture(scope='function', autouse=True)
+# def clear_database_auto(clear_database):
+#     """Automatically clear database in between tests."""
+#     pass
 
 
 # need fixed aiida_localhost to have set_default_mpiprocs_per_machine set to 1
 @pytest.fixture(scope='function')
-def aiida_localhost_serial(temp_dir):  # pylint: disable=redefined-outer-name
+def aiida_localhost_serial(tmp_path):  # pylint: disable=redefined-outer-name
     """Get an AiiDA computer for localhost.
 
     Usage::
@@ -67,7 +69,7 @@ def test_1(aiida_localhost):
         label=name,
         description='localhost computer set up by test manager',
         hostname=name,
-        workdir=temp_dir,
+        workdir=str(tmp_path),
         transport_type=transport_type,
         scheduler_type=scheduler_type
     )
diff --git a/tests/dbsetup.py b/tests/dbsetup.py
index 2ef8d60b..b900ccb1 100644
--- a/tests/dbsetup.py
+++ b/tests/dbsetup.py
@@ -50,7 +50,7 @@ def prepare_computer(computername, workdir):
 
 
 def prepare_code(codename, codelocation, computername, workdir):
-    """."""
+    """Prepare a code, either create entry in AiiDA DB or load it from DB."""
     # first create or read computer
     comp = prepare_computer(computername, workdir)
     # now decide which code to add
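
Note: the fixture migration applied above boils down to the pattern sketched below. This is only a rough
illustration, not a test contained in this patch: the test name is invented, the setup is condensed from the
commented-out test_voronoi_cached above, and voronoi_local_code, data_dir, aiida_profile_clean and
enable_archive_cache are assumed to be provided by tests/conftest.py, aiida-core and aiida-test-cache as
configured in this patch (the module is assumed to live in tests/calculations/ like the test file above).

    from aiida.engine import run_get_node
    from aiida.orm import Dict, StructureData
    from masci_tools.io.kkr_params import kkrparams
    from aiida_kkr.calculations.voro import VoronoiCalculation

    # fixture and test data path re-exported from tests/conftest.py, as in the tests above
    from ..conftest import voronoi_local_code, data_dir


    def test_voronoi_cached_sketch(aiida_profile_clean, voronoi_local_code, enable_archive_cache):
        """Run a simple Cu voronoi calculation through the archive cache (illustrative sketch)."""
        # fcc Cu test structure, condensed from the commented-out test_voronoi_cached
        alat = 3.61
        bravais = [[0.5 * alat, 0.5 * alat, 0.0], [0.5 * alat, 0.0, 0.5 * alat], [0.0, 0.5 * alat, 0.5 * alat]]
        structure = StructureData(cell=bravais)
        structure.append_atom(position=[0, 0, 0], symbols='Cu')

        # voronoi input parameters via masci-tools' kkrparams helper
        params = kkrparams(params_type='voronoi')
        params.set_multiple_values(LMAX=2, NSPIN=1, RCLUSTZ=2.3)

        builder = VoronoiCalculation.get_builder()
        builder.code = voronoi_local_code
        builder.structure = structure
        builder.parameters = Dict({k: v for k, v in params.items() if v})
        builder.metadata.options = {'resources': {'num_machines': 1, 'tot_num_mpiprocs': 1}}

        # first run: execute voronoi and export the provenance to the archive file;
        # later runs: import the archive, enable caching and reuse the stored result
        with enable_archive_cache(data_dir / 'voronoi_cached.aiida'):
            out, node = run_get_node(builder)
        assert node.get_cache_source() is not None

Here enable_archive_cache takes over the role of aiida-testing's run_with_cache: instead of wrapping the run
call itself, it is a context manager bound to a single .aiida archive file, and node.get_cache_source() is
non-None when the calculation node was created from the cache rather than by actually running the code.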