diff --git a/aiida_nanotech_empa/workflows/cp2k/__init__.py b/aiida_nanotech_empa/workflows/cp2k/__init__.py
index 8bf1ffc..103839f 100644
--- a/aiida_nanotech_empa/workflows/cp2k/__init__.py
+++ b/aiida_nanotech_empa/workflows/cp2k/__init__.py
@@ -10,6 +10,7 @@
 from .orbitals_workchain import Cp2kOrbitalsWorkChain
 from .pdos_workchain import Cp2kPdosWorkChain
 from .phonons_workchain import Cp2kPhononsWorkChain
+from .reftraj_md_workchain import Cp2kRefTrajWorkChain
 from .replica_workchain import Cp2kReplicaWorkChain
 from .stm_workchain import Cp2kStmWorkChain
@@ -28,4 +29,5 @@
     "Cp2kReplicaWorkChain",
     "Cp2kNebWorkChain",
     "Cp2kPhononsWorkChain",
+    "Cp2kRefTrajWorkChain",
 )
diff --git a/aiida_nanotech_empa/workflows/cp2k/cp2k_utils.py b/aiida_nanotech_empa/workflows/cp2k/cp2k_utils.py
index eb0b16b..3451c3b 100644
--- a/aiida_nanotech_empa/workflows/cp2k/cp2k_utils.py
+++ b/aiida_nanotech_empa/workflows/cp2k/cp2k_utils.py
@@ -69,7 +69,14 @@ def get_kinds_section(kinds_dict, protocol="gapw_std"):
 def determine_kinds(structure, magnetization_per_site=None, ghost_per_site=None):
     """Gather the same atoms with the same magnetization into one atomic kind."""
-    ase_structure = structure.get_ase()
+
+    if isinstance(structure, orm.TrajectoryData):
+        cell = structure.get_array("cells")[0]
+        positions = structure.get_array("positions")[0]
+        symbols = structure.symbols
+        ase_structure = ase.Atoms(symbols, positions=positions, cell=cell)
+    else:
+        ase_structure = structure.get_ase()
 
     if magnetization_per_site is None or len(magnetization_per_site) == 0:
         magnetization_per_site = [0 for i in range(len(ase_structure))]
@@ -176,7 +183,6 @@ def load_protocol(fname, protocol=None):
 def get_dft_inputs(dft_params, structure, template, protocol):
-    ase_atoms = structure.get_ase()
     files = {
         "basis": orm.SinglefileData(
             file=os.path.join(
             ),
         ),
     }
 
+    # Number of atoms: for a TrajectoryData input, build a StructureData from the
+    # first frame so the rest of the function can treat both input types alike.
+    if isinstance(structure, orm.TrajectoryData):
+        structure = orm.StructureData(
+            ase=ase.Atoms(
+                structure.symbols,
+                positions=structure.get_array("positions")[0],
+                cell=structure.get_array("cells")[0],
+            )
+        )
+    natoms = len(structure.sites)
+
     # Load input template.
     input_dict = load_protocol(template, protocol)
@@ -211,7 +228,7 @@ def get_dft_inputs(dft_params, structure, template, protocol):
     input_dict["FORCE_EVAL"]["DFT"]["CHARGE"] = dft_params["charge"]
 
     # uks
-    magnetization_per_site = [0 for i in range(len(ase_atoms))]
+    magnetization_per_site = [0 for i in range(natoms)]
     if "uks" in dft_params:
         if dft_params["uks"]:
             magnetization_per_site = dft_params["magnetization_per_site"]
diff --git a/aiida_nanotech_empa/workflows/cp2k/protocols/md_reftraj_protocol.yml b/aiida_nanotech_empa/workflows/cp2k/protocols/md_reftraj_protocol.yml
new file mode 100644
index 0000000..856e1bf
--- /dev/null
+++ b/aiida_nanotech_empa/workflows/cp2k/protocols/md_reftraj_protocol.yml
@@ -0,0 +1,249 @@
+standard:
+  GLOBAL:
+    EXTENDED_FFT_LENGTHS: ''
+    PRINT_LEVEL: MEDIUM
+    RUN_TYPE: MD
+    WALLTIME: '600'
+    ELPA_KERNEL: AVX2_BLOCK2
+  MOTION:
+    MD:
+      STEPS: '1'
+      ENSEMBLE: 'REFTRAJ'
+      REFTRAJ:
+        FIRST_SNAPSHOT: '1'
+        LAST_SNAPSHOT: '1'
+        STRIDE: '1'
+        EVAL_FORCES: '.TRUE.'
+        TRAJ_FILE_NAME: 'aiida-reftraj.xyz'
+        VARIABLE_VOLUME: .TRUE.
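+        # With VARIABLE_VOLUME enabled, CP2K reads one cell per frame from
+        # CELL_FILE_NAME (next key), so per-frame cell changes along the
+        # reference trajectory are honoured.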
+        CELL_FILE_NAME: 'aiida-reftraj.cell'
+    PRINT:
+      RESTART_HISTORY:
+        _: 'OFF'
+      FORCES:
+        EACH:
+          MD: '1'
+        FORMAT: 'XYZ'
+      CELL:
+        EACH:
+          MD: '1'
+  FORCE_EVAL:
+    METHOD: Quickstep
+    SUBSYS:
+      CELL:
+        PERIODIC: XYZ
+        SYMMETRY: ORTHORHOMBIC
+    DFT:
+      UKS: .FALSE.
+      MULTIPLICITY: '0'
+      CHARGE: '0'
+      BASIS_SET_FILE_NAME: BASIS_MOLOPT
+      POTENTIAL_FILE_NAME: POTENTIAL
+      RESTART_FILE_NAME: ./parent_calc/aiida-RESTART.wfn
+      MGRID:
+        CUTOFF: '600'
+        NGRIDS: '5'
+      POISSON:
+        PERIODIC: XYZ
+        POISSON_SOLVER: PERIODIC
+      QS:
+        EPS_DEFAULT: '1.0E-14'
+        EXTRAPOLATION: ASPC
+        EXTRAPOLATION_ORDER: '3'
+        METHOD: GPW
+      SCF:
+        SCF_GUESS: RESTART
+        EPS_SCF: '1.0E-7'
+        MAX_SCF: '40'
+        OT:
+          MINIMIZER: CG
+          PRECONDITIONER: FULL_SINGLE_INVERSE
+        OUTER_SCF:
+          EPS_SCF: '1.0E-7'
+          MAX_SCF: '50'
+        PRINT:
+          RESTART:
+            ADD_LAST: NUMERIC
+            EACH:
+              MD: '1'
+            FILENAME: RESTART
+          RESTART_HISTORY:
+            _: 'OFF'
+            BACKUP_COPIES: '0'
+      XC:
+        VDW_POTENTIAL:
+          DISPERSION_FUNCTIONAL: PAIR_POTENTIAL
+          PAIR_POTENTIAL:
+            CALCULATE_C9_TERM: .TRUE.
+            PARAMETER_FILE_NAME: dftd3.dat
+            REFERENCE_FUNCTIONAL: PBE
+            R_CUTOFF: '15'
+            TYPE: DFTD3
+        XC_FUNCTIONAL:
+          _: PBE
+low_accuracy:
+  GLOBAL:
+    EXTENDED_FFT_LENGTHS: ''
+    PRINT_LEVEL: MEDIUM
+    RUN_TYPE: MD
+    WALLTIME: '600'
+    ELPA_KERNEL: AVX2_BLOCK2
+  MOTION:
+    MD:
+      STEPS: '1'
+      ENSEMBLE: 'REFTRAJ'
+      REFTRAJ:
+        FIRST_SNAPSHOT: '1'
+        LAST_SNAPSHOT: '1'
+        STRIDE: '1'
+        EVAL_FORCES: '.TRUE.'
+        TRAJ_FILE_NAME: 'aiida-reftraj.xyz'
+        VARIABLE_VOLUME: .TRUE.
+        CELL_FILE_NAME: 'aiida-reftraj.cell'
+    PRINT:
+      RESTART_HISTORY:
+        _: 'OFF'
+      FORCES:
+        EACH:
+          MD: '1'
+        FORMAT: 'XYZ'
+      CELL:
+        EACH:
+          MD: '1'
+  FORCE_EVAL:
+    METHOD: Quickstep
+    SUBSYS:
+      CELL:
+        PERIODIC: XYZ
+        SYMMETRY: ORTHORHOMBIC
+    DFT:
+      UKS: .FALSE.
+      MULTIPLICITY: '0'
+      CHARGE: '0'
+      BASIS_SET_FILE_NAME: BASIS_MOLOPT
+      POTENTIAL_FILE_NAME: POTENTIAL
+      RESTART_FILE_NAME: ./parent_calc/aiida-RESTART.wfn
+      MGRID:
+        CUTOFF: '600'
+        NGRIDS: '5'
+      POISSON:
+        PERIODIC: XYZ
+        POISSON_SOLVER: PERIODIC
+      QS:
+        EPS_DEFAULT: '1.0E-14'
+        EXTRAPOLATION: ASPC
+        EXTRAPOLATION_ORDER: '3'
+        METHOD: GPW
+      SCF:
+        SCF_GUESS: RESTART
+        EPS_SCF: '1.0E-7'
+        MAX_SCF: '40'
+        OT:
+          MINIMIZER: CG
+          PRECONDITIONER: FULL_SINGLE_INVERSE
+        OUTER_SCF:
+          EPS_SCF: '1.0E-7'
+          MAX_SCF: '50'
+        PRINT:
+          RESTART:
+            ADD_LAST: NUMERIC
+            EACH:
+              MD: '1'
+            FILENAME: RESTART
+          RESTART_HISTORY:
+            _: 'OFF'
+            BACKUP_COPIES: '0'
+      XC:
+        VDW_POTENTIAL:
+          DISPERSION_FUNCTIONAL: PAIR_POTENTIAL
+          PAIR_POTENTIAL:
+            CALCULATE_C9_TERM: .TRUE.
+            PARAMETER_FILE_NAME: dftd3.dat
+            REFERENCE_FUNCTIONAL: PBE
+            R_CUTOFF: '15'
+            TYPE: DFTD3
+        XC_FUNCTIONAL:
+          _: PBE
+debug:
+  GLOBAL:
+    EXTENDED_FFT_LENGTHS: ''
+    PRINT_LEVEL: MEDIUM
+    RUN_TYPE: MD
+    WALLTIME: '600'
+    ELPA_KERNEL: AVX2_BLOCK2
+  MOTION:
+    MD:
+      STEPS: '1'
+      ENSEMBLE: 'REFTRAJ'
+      REFTRAJ:
+        FIRST_SNAPSHOT: '1'
+        LAST_SNAPSHOT: '1'
+        STRIDE: '1'
+        EVAL_FORCES: '.TRUE.'
+        TRAJ_FILE_NAME: 'aiida-reftraj.xyz'
+        VARIABLE_VOLUME: .TRUE.
+        CELL_FILE_NAME: 'aiida-reftraj.cell'
+    PRINT:
+      RESTART_HISTORY:
+        _: 'OFF'
+      FORCES:
+        EACH:
+          MD: '1'
+        FORMAT: 'XYZ'
+      CELL:
+        EACH:
+          MD: '1'
+  FORCE_EVAL:
+    METHOD: Quickstep
+    SUBSYS:
+      CELL:
+        PERIODIC: XYZ
+        SYMMETRY: ORTHORHOMBIC
+    DFT:
+      UKS: .FALSE.
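+      # The debug protocol keeps the same input layout but loosens EPS_SCF to
+      # 1.0E-1 below -- suitable for smoke tests only, not for production runs.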
+      MULTIPLICITY: '0'
+      CHARGE: '0'
+      BASIS_SET_FILE_NAME: BASIS_MOLOPT
+      POTENTIAL_FILE_NAME: POTENTIAL
+      RESTART_FILE_NAME: ./parent_calc/aiida-RESTART.wfn
+      MGRID:
+        CUTOFF: '600'
+        NGRIDS: '5'
+      POISSON:
+        PERIODIC: XYZ
+        POISSON_SOLVER: PERIODIC
+      QS:
+        EPS_DEFAULT: '1.0E-12'
+        EXTRAPOLATION: ASPC
+        EXTRAPOLATION_ORDER: '3'
+        METHOD: GPW
+      SCF:
+        SCF_GUESS: RESTART
+        EPS_SCF: '1.0E-1'
+        MAX_SCF: '40'
+        OT:
+          MINIMIZER: CG
+          PRECONDITIONER: FULL_SINGLE_INVERSE
+        OUTER_SCF:
+          EPS_SCF: '1.0E-1'
+          MAX_SCF: '50'
+        PRINT:
+          RESTART:
+            ADD_LAST: NUMERIC
+            EACH:
+              MD: '1'
+            FILENAME: RESTART
+          RESTART_HISTORY:
+            _: 'OFF'
+            BACKUP_COPIES: '0'
+      XC:
+        VDW_POTENTIAL:
+          DISPERSION_FUNCTIONAL: PAIR_POTENTIAL
+          PAIR_POTENTIAL:
+            CALCULATE_C9_TERM: .TRUE.
+            PARAMETER_FILE_NAME: dftd3.dat
+            REFERENCE_FUNCTIONAL: PBE
+            R_CUTOFF: '15'
+            TYPE: DFTD3
+        XC_FUNCTIONAL:
+          _: PBE
diff --git a/aiida_nanotech_empa/workflows/cp2k/reftraj_md_workchain.py b/aiida_nanotech_empa/workflows/cp2k/reftraj_md_workchain.py
new file mode 100644
index 0000000..6b9b52a
--- /dev/null
+++ b/aiida_nanotech_empa/workflows/cp2k/reftraj_md_workchain.py
@@ -0,0 +1,328 @@
+from copy import deepcopy
+
+import numpy as np
+from aiida import engine, orm, plugins
+from aiida_cp2k.utils import merge_trajectory_data_unique
+
+# from ...utils import common_utils
+from . import cp2k_utils
+
+Cp2kBaseWorkChain = plugins.WorkflowFactory("cp2k.base")
+# Cp2kRefTrajWorkChain = plugins.WorkflowFactory("cp2k.reftraj")
+TrajectoryData = plugins.DataFactory("array.trajectory")
+
+
+def last_reftraj_wc(input_trajectory):
+    """Identify the latest workchain that ran on the same input trajectory."""
+
+    # Query for WorkChainNodes that took this trajectory as input and carry the
+    # label "CP2K_RefTraj".
+    qb = orm.QueryBuilder()
+    qb.append(
+        orm.Node, filters={"uuid": input_trajectory.uuid}, tag="input_node"
+    )  # Input node filter.
+    qb.append(
+        orm.WorkChainNode,
+        filters={
+            "label": "CP2K_RefTraj",
+            "attributes.process_state": {"in": ["finished", "excepted", "killed"]},
+        },  # Keep only terminated workchains with the right label.
+        with_incoming="input_node",  # The workchain must have this node as input.
+        # project=['id'],  # Only project the PK (id).
+        tag="workchain",
+    )
+
+    # Sort by ctime to get the last workchain.
+    qb.order_by({"workchain": {"ctime": "desc"}})
+
+    # Fetch the result.
+    result = qb.first()
+
+    if result:
+        return result[0]  # Return the workchain node.
+    else:
+        return None  # No workchain found.
+
+
+def retrieve_previous_trajectories(reftraj_wc):
+    """Collect the trajectories produced by a previous workchain on the same input trajectory."""
+
+    trajectories = []
+    if reftraj_wc.is_finished_ok:
+        trajectories.append(reftraj_wc.outputs.output_trajectory)
+    else:
+        # Check whether a merged trajectory is already available from the
+        # beginning of the previous workchain.
+        if reftraj_wc.called_descendants[0].label == "merge_trajectory_data_unique":
+            trajectories = [reftraj_wc.called_descendants[0].outputs.result]
+
+        base_workchains = [
+            wc
+            for wc in reftraj_wc.called_descendants
+            if wc.process_label == "Cp2kBaseWorkChain"
+        ]
+        for base_wc in base_workchains:
+            # If the Cp2kBaseWorkChain finished ok, use its TrajectoryData.
+            if base_wc.is_finished_ok:
+                trajectories.append(base_wc.outputs.output_trajectory)
+            # Otherwise use the data from each completed Cp2kCalculation.
+            else:
+                cp2k_calcs = [
+                    calc
+                    for calc in base_wc.called_descendants
+                    if calc.process_label == "Cp2kCalculation" and calc.is_finished_ok
+                ]
+                for calc in cp2k_calcs:
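+                    # Each finished Cp2kCalculation holds the partial trajectory
+                    # for the REFTRAJ steps it completed before the batch failed.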
+                    trajectories.append(calc.outputs.output_trajectory)
+    return trajectories
+
+
+@engine.calcfunction
+def merge_trajectories(*trajectories):
+    """Merge a list of trajectories into a single one."""
+
+    arrays = {}
+    traj_keys = trajectories[0].get_arraynames()
+    symbols = trajectories[0].symbols
+    traj_keys.remove("steps")
+    for key in traj_keys:
+        arrays[key] = []
+    for trajectory in trajectories:
+        for key in traj_keys:
+            arrays[key].append(trajectory.get_array(key))
+
+    merged_trajectory = TrajectoryData()
+    if "cells" in traj_keys:
+        merged_trajectory.set_trajectory(
+            symbols,
+            np.concatenate(arrays["positions"]),
+            cells=np.concatenate(arrays["cells"]),
+        )
+    else:
+        merged_trajectory.set_trajectory(symbols, np.concatenate(arrays["positions"]))
+    traj_keys = [key for key in traj_keys if key not in ["cells", "positions"]]
+    for key in traj_keys:
+        merged_trajectory.set_array(key, np.concatenate(arrays[key]))
+
+    return merged_trajectory
+
+
+# @engine.calcfunction
+def create_batches(trajectory, num_batches, steps_completed):
+    """Create lists of consecutive snapshot indices; counting starts from 1 for the CP2K input.
+
+    Frames listed in steps_completed are skipped, and the first batch contains a single frame.
+    """
+
+    input_list = [i + 1 for i in range(trajectory.get_shape("positions")[0])]
+    for i in steps_completed:
+        input_list.remove(i)
+
+    if len(input_list) == 0:
+        return {}
+    # If there are fewer elements than num_batches + 1, return each element as a separate batch.
+    if len(input_list) < num_batches.value + 1:
+        return {i: [value] for i, value in enumerate(input_list)}
+
+    # Initialize the batches with the first batch containing only the first element.
+    batches = [[input_list[0]]]
+
+    # Calculate the number of remaining elements to distribute among the other batches.
+    remaining_elements = input_list[1:]
+    total_remaining = len(remaining_elements)
+
+    # Calculate the minimum number of elements each batch must have.
+    min_elements_per_batch = total_remaining // num_batches.value
+    extra_elements = (
+        total_remaining % num_batches.value
+    )  # Determine how many batches will have an extra element.
+
+    start_idx = 0
+    for i in range(num_batches.value):
+        # If there are extra elements, add one more to this batch.
+        end_idx = start_idx + min_elements_per_batch + (1 if i < extra_elements else 0)
+        batches.append(remaining_elements[start_idx:end_idx])
+        start_idx = end_idx
+
+    return {i: batch for i, batch in enumerate(batches)}
+
+
+class Cp2kRefTrajWorkChain(engine.WorkChain):
+    """Workflow to run reference-trajectory (REFTRAJ) MD calculations with CP2K in batches."""
+
+    @classmethod
+    def define(cls, spec):
+        """Define the workflow."""
+        super().define(spec)
+
+        # Define the inputs of the workflow.
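+        # `trajectory` provides the reference frames; `num_batches` controls how
+        # they are split; `restart` reuses the trajectories of a previous run on
+        # the same input trajectory (matched via the node label "CP2K_RefTraj").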
+ spec.input("code", valid_type=orm.Code) + # spec.input("structure", valid_type=orm.StructureData) + spec.input("trajectory", valid_type=TrajectoryData) + spec.input("num_batches", valid_type=orm.Int, default=lambda: orm.Int(10)) + spec.input("parent_calc_folder", valid_type=orm.RemoteData, required=False) + spec.input("restart", valid_type=orm.Bool, required=False) + spec.input( + "protocol", + valid_type=orm.Str, + default=lambda: orm.Str("standard"), + required=False, + help="Protocol supported by the Cp2kBaseWorkChain.", + ) + spec.input("dft_params", valid_type=orm.Dict) + spec.input("sys_params", valid_type=orm.Dict) + spec.input( + "options", + valid_type=dict, + non_db=True, + help="Define options for the cacluations: walltime, memory, CPUs, etc.", + ) + + spec.outline( + cls.setup, # create batches, if reordering of structures create indexing + engine.if_(cls.something_to_run)( + cls.first_structure, # Run the first SCF to get the initial wavefunction + cls.run_reftraj_batches, + ), # Run the batches of the reftraj simulations + cls.merge_batches_output, + ) + + spec.outputs.dynamic = True + spec.output_namespace("structures", valid_type=orm.StructureData) + spec.output_namespace("details", valid_type=orm.Dict) + spec.exit_code(390, "ERROR_TERMINATION", message="One or more steps failed") + + def setup(self): + """Initialize the workchain process.""" + self.report("Inspecting input and setting up things") + self.ctx.previuos_trajectory = None + self.ctx.steps_completed = [] + restart = self.inputs.get("restart", None) + if restart: + last_wc = last_reftraj_wc(self.inputs.trajectory) + self.report(f"Restrating from last workchain found: {last_wc}") + previous_trajectories = retireve_previous_trajectories(last_wc) + self.ctx.previuos_trajectory = merge_trajectory_data_unique( + *previous_trajectories + ) + self.ctx.steps_completed = ( + self.ctx.previuos_trajectory.get_stepids().tolist() + ) + + ( + self.ctx.files, + self.ctx.input_dict, + self.ctx.structure_with_tags, + ) = cp2k_utils.get_dft_inputs( + self.inputs.dft_params.get_dict(), + self.inputs.trajectory, + "md_reftraj_protocol.yml", + self.inputs.protocol.value, + ) + self.ctx.input_dict["GLOBAL"]["WALLTIME"] = max( + 600, self.inputs.options["max_wallclock_seconds"] - 600 + ) + # create batches avoiding steps already completed. 
+        self.ctx.something_to_run = False
+        self.ctx.batches = create_batches(
+            self.inputs.trajectory, self.inputs.num_batches, self.ctx.steps_completed
+        )
+        if len(self.ctx.batches) > 0:
+            self.ctx.something_to_run = True
+        self.ctx.n_batches = len(self.ctx.batches)
+        return engine.ExitCode(0)
+
+    def something_to_run(self):
+        """Return whether there is any batch left to run."""
+        return self.ctx.something_to_run
+
+    def first_structure(self):
+        """Run an SCF calculation on the first snapshot."""
+        input_dict = deepcopy(self.ctx.input_dict)
+        batch = self.ctx.batches[0]
+
+        self.report(f"Running structure {batch[0]} to {batch[-1]}")
+
+        input_dict["MOTION"]["MD"]["REFTRAJ"]["FIRST_SNAPSHOT"] = batch[0]
+        input_dict["MOTION"]["MD"]["REFTRAJ"]["LAST_SNAPSHOT"] = batch[-1]
+
+        # Create the input for the reftraj workchain.
+        builder = Cp2kBaseWorkChain.get_builder()
+        builder.cp2k.structure = orm.StructureData(ase=self.ctx.structure_with_tags)
+        builder.cp2k.trajectory = self.inputs.trajectory
+        builder.cp2k.code = self.inputs.code
+        builder.cp2k.file = self.ctx.files
+        if "parent_calc_folder" in self.inputs:
+            builder.cp2k.parent_calc_folder = self.inputs.parent_calc_folder
+        builder.cp2k.metadata.options = self.inputs.options
+        builder.cp2k.metadata.label = f"structures_{batch[0]}_to_{batch[-1]}"
+        builder.cp2k.metadata.options.parser_name = "cp2k_advanced_parser"
+
+        builder.cp2k.parameters = orm.Dict(dict=input_dict)
+
+        future = self.submit(builder)
+
+        key = "reftraj_batch_0"
+        self.report(f"Submitted reftraj batch: {key} with pk: {future.pk}")
+
+        self.to_context(**{key: future})
+
+    def run_reftraj_batches(self):
+        """Check that the first batch completed, then submit the remaining batches."""
+        key0 = "reftraj_batch_0"
+        if not getattr(self.ctx, key0).is_finished_ok:
+            self.report(f"Batch {key0} failed")
+            return self.exit_codes.ERROR_TERMINATION
+        for batch in range(1, self.ctx.n_batches):
+            first = self.ctx.batches[batch][0]
+            last = self.ctx.batches[batch][-1]
+            self.report(f"Running structures {first} to {last}")
+
+            # Update the input_dict with the new batch.
+            input_dict = deepcopy(self.ctx.input_dict)
+            input_dict["MOTION"]["MD"]["STEPS"] = 1 + last - first
+            input_dict["MOTION"]["MD"]["REFTRAJ"]["FIRST_SNAPSHOT"] = first
+            input_dict["MOTION"]["MD"]["REFTRAJ"]["LAST_SNAPSHOT"] = last
+
+            # Create the input for the reftraj workchain.
+            builder = Cp2kBaseWorkChain.get_builder()
+            builder.cp2k.structure = orm.StructureData(ase=self.ctx.structure_with_tags)
+            builder.cp2k.trajectory = self.inputs.trajectory
+            builder.cp2k.code = self.inputs.code
+            builder.cp2k.file = self.ctx.files
+            builder.cp2k.metadata.options = self.inputs.options
+            builder.cp2k.metadata.label = f"structures_{first}_to_{last}"
+            builder.cp2k.metadata.options.parser_name = "cp2k_advanced_parser"
+            builder.cp2k.parameters = orm.Dict(dict=input_dict)
+            builder.cp2k.parent_calc_folder = getattr(
+                self.ctx, key0
+            ).outputs.remote_folder
+
+            future = self.submit(builder)
+
+            key = f"reftraj_batch_{batch}"
+            self.report(f"Submitted reftraj batch: {key} with pk: {future.pk}")
+
+            self.to_context(**{key: future})
+
+    def merge_batches_output(self):
+        """Merge the outputs of the successful batches only."""
+
+        # merged_traj = []
+        # for i_batch in range(self.ctx.n_batches):
+        #     merged_traj.extend(self.ctx[f"reftraj_batch_{i_batch}"].outputs.trajectory)
+
+        trajectories_to_merge = []
+        for batch in self.ctx.batches:
+            key = f"reftraj_batch_{batch}"
+            if not getattr(self.ctx, key).is_finished_ok:
+                self.report(f"Batch {key} failed")
+                return self.exit_codes.ERROR_TERMINATION
+            trajectories_to_merge.append(
+                getattr(self.ctx, key).outputs.output_trajectory
+            )
+        if self.ctx.previous_trajectory is not None:
+            trajectories_to_merge.append(self.ctx.previous_trajectory)
+        merged_trajectory = merge_trajectory_data_unique(
+            *trajectories_to_merge
+        )  # merge_trajectories(*trajectories_to_merge)
+
+        self.out("output_trajectory", merged_trajectory)
+        self.report("done")
+        return engine.ExitCode(0)
diff --git a/examples/workflows/aa b/examples/workflows/aa
deleted file mode 100755
index f9b714b..0000000
--- a/examples/workflows/aa
+++ /dev/null
@@ -1,431 +0,0 @@
-verdi node delete 337
-verdi node delete 344
-verdi node delete 345
-verdi node delete 353
-verdi node delete 354
-verdi node delete 362
-verdi node delete 369
-verdi node delete 370
-verdi node delete 378
-verdi node delete 379
-verdi node delete 387
-verdi node delete 394
-verdi node delete 395
-verdi node delete 403
-verdi node delete 404
-verdi node delete 414
-verdi node delete 422
-verdi node delete 423
-verdi node delete 434
-verdi node delete 442
-verdi node delete 443
-verdi node delete 498
-verdi node delete 501
-verdi node delete 508
-verdi node delete 509
-verdi node delete 517
-verdi node delete 518
-verdi node delete 523
-verdi node delete 537
-verdi node delete 545
-verdi node delete 550
-verdi node delete 555
-verdi node delete 556
-verdi node delete 557
-verdi node delete 558
-verdi node delete 571
-verdi node delete 575
-verdi node delete 576
-verdi node delete 577
-verdi node delete 588
-verdi node delete 593
-verdi node delete 596
-verdi node delete 603
-verdi node delete 610
-verdi node delete 611
-verdi node delete 612
-verdi node delete 621
-verdi node delete 622
-verdi node delete 630
-verdi node delete 631
-verdi node delete 638
-verdi node delete 655
-verdi node delete 663
-verdi node delete 668
-verdi node delete 673
-verdi node delete 674
-verdi node delete 675
-verdi node delete 676
-verdi node delete 689
-verdi node delete 693
-verdi node delete 694
-verdi node delete 695
-verdi node delete 710
-verdi node delete 715
-verdi node delete 718
-verdi node delete 725
-verdi node delete 732
-verdi node delete 733
-verdi node delete 734
-verdi node delete 743
-verdi node delete 744
-verdi node delete 752
-verdi node delete 753
-verdi node delete 760
-verdi node delete 975
-verdi node delete 981
-verdi node delete 988
-verdi node delete 989
-verdi node delete 997
-verdi node delete 998
-verdi node delete 1002
-verdi node delete 1003
-verdi node delete 1012
-verdi node delete 1018
-verdi node delete 1025
-verdi node delete 1026
-verdi node delete 1034
-verdi node delete 1035
-verdi node delete 1039
-verdi node delete 1040
-verdi node delete 1051
-verdi node delete 1056
-verdi node delete 1063
-verdi node delete 1064
-verdi node delete 1072
-verdi node delete 1073
-verdi node delete 1077
-verdi node delete 1080
-verdi node delete 1086
-verdi node delete 1090
-verdi node delete 1097
-verdi node delete 1098
-verdi node delete 1106
-verdi node delete 1107
-verdi node delete 1112
-verdi node delete 1118
-verdi node delete 1121
-verdi node delete 1128
-verdi node delete 1129
-verdi node delete 1137
-verdi node delete 1138
-verdi node delete 1143
-verdi node delete 1153
-verdi node delete 1161
-verdi node delete 1162
-verdi node delete 1174
-verdi node delete 1182
-verdi node delete 1183
-verdi node delete 1198
-verdi node delete 1206
-verdi node delete 1207
-verdi node delete 1222
-verdi node delete 1230
-verdi node delete 1231
-verdi node delete 1243
-verdi node delete 1251
-verdi node delete 1252
-verdi node delete 1263
-verdi node delete 1269
-verdi node delete 1275
-verdi node delete 1281
-verdi node delete 1287
-verdi node delete 1293
-verdi node delete 1299
-verdi node delete 1305
-verdi node delete 1311
-verdi node delete 1319
-verdi node delete 1320
-verdi node delete 1324
-verdi node delete 1332
-verdi node delete 1333
-verdi node delete 1341
-verdi node delete 1349
-verdi node delete 1350
-verdi node delete 1355
-verdi node delete 1363
-verdi node delete 1364
-verdi node delete 1369
-verdi node delete 1377
-verdi node delete 1378
-verdi node delete 1386
-verdi node delete 1394
-verdi node delete 1395
-verdi node delete 1433
-verdi node delete 1441
-verdi node delete 1442
-verdi node delete 1450
-verdi node delete 1456
-verdi node delete 1464
-verdi node delete 1465
-verdi node delete 1473
-verdi node delete 1481
-verdi node delete 1482
-verdi node delete 1490
-verdi node delete 1498
-verdi node delete 1499
-verdi node delete 1507
-verdi node delete 1515
-verdi node delete 1516
-verdi node delete 1524
-verdi node delete 1532
-verdi node delete 1533
-verdi node delete 1541
-verdi node delete 1549
-verdi node delete 1550
-verdi node delete 1553
-verdi node delete 1559
-verdi node delete 1567
-verdi node delete 1568
-verdi node delete 1571
-verdi node delete 1576
-verdi node delete 1584
-verdi node delete 1585
-verdi node delete 1588
-verdi node delete 1593
-verdi node delete 1601
-verdi node delete 1602
-verdi node delete 1610
-verdi node delete 1618
-verdi node delete 1619
-verdi node delete 1627
-verdi node delete 1635
-verdi node delete 1636
-verdi node delete 1644
-verdi node delete 1652
-verdi node delete 1653
-verdi node delete 1661
-verdi node delete 1667
-verdi node delete 1675
-verdi node delete 1676
-verdi node delete 1684
-verdi node delete 1692
-verdi node delete 1693
-verdi node delete 1701
-verdi node delete 1707
-verdi node delete 1715
-verdi node delete 1716
-verdi node delete 1724
-verdi node delete 1732
-verdi node delete 1733
-verdi node delete 1741
-verdi node delete 1749
-verdi node delete 1750
-verdi node delete 1753
-verdi node delete 1759
-verdi node delete 1767
-verdi node delete 1768
-verdi node delete 1776
-verdi node delete 1784
-verdi node delete 1785
-verdi node delete 1793
-verdi node delete 1801
-verdi node delete 1802
-verdi node delete 1807
-verdi node delete 1815
-verdi node delete 1816
-verdi node delete 1824
-verdi node delete 1832
-verdi node delete 1833
-verdi node delete 1841
-verdi node delete 1849
-verdi node delete 1850
-verdi node delete 1855
-verdi node delete 1863
-verdi node delete 1864
-verdi node delete 1872
-verdi node delete 1880
-verdi node delete 1881
-verdi node delete 1889
-verdi node delete 1897
-verdi node delete 1898
-verdi node delete 1906
-verdi node delete 1914
-verdi node delete 1915
-verdi node delete 1923
-verdi node delete 1931
-verdi node delete 1932
-verdi node delete 1940
-verdi node delete 1948
-verdi node delete 1949
-verdi node delete 1956
-verdi node delete 1961
-verdi node delete 1969
-verdi node delete 1970
-verdi node delete 1973
-verdi node delete 1978
-verdi node delete 1986
-verdi node delete 1987
-verdi node delete 1991
-verdi node delete 1999
-verdi node delete 2000
-verdi node delete 2033
-verdi node delete 2043
-verdi node delete 2050
-verdi node delete 2058
-verdi node delete 2068
-verdi node delete 2075
-verdi node delete 2082
-verdi node delete 2092
-verdi node delete 2099
-verdi node delete 2112
-verdi node delete 2122
-verdi node delete 2129
-verdi node delete 2143
-verdi node delete 2150
-verdi node delete 2163
-verdi node delete 2173
-verdi node delete 2179
-verdi node delete 2186
-verdi node delete 2196
-verdi node delete 2203
-verdi node delete 2216
-verdi node delete 2243
-verdi node delete 2249
-verdi node delete 2254
-verdi node delete 2259
-verdi node delete 2271
-verdi node delete 2279
-verdi node delete 2280
-verdi node delete 2281
-verdi node delete 2298
-verdi node delete 2306
-verdi node delete 2307
-verdi node delete 2308
-verdi node delete 2325
-verdi node delete 2326
-verdi node delete 2339
-verdi node delete 2340
-verdi node delete 2349
-verdi node delete 2354
-verdi node delete 2366
-verdi node delete 2374
-verdi node delete 2375
-verdi node delete 2376
-verdi node delete 2393
-verdi node delete 2401
-verdi node delete 2402
-verdi node delete 2403
-verdi node delete 2420
-verdi node delete 2428
-verdi node delete 2429
-verdi node delete 2430
-verdi node delete 2447
-verdi node delete 2455
-verdi node delete 2456
-verdi node delete 2457
-verdi node delete 2474
-verdi node delete 2482
-verdi node delete 2483
-verdi node delete 2484
-verdi node delete 2501
-verdi node delete 2509
-verdi node delete 2510
-verdi node delete 2511
-verdi node delete 2528
-verdi node delete 2529
-verdi node delete 2561
-verdi node delete 2566
-verdi node delete 2574
-verdi node delete 2601
-verdi node delete 2605
-verdi node delete 2610
-verdi node delete 2618
-verdi node delete 2623
-verdi node delete 2650
-verdi node delete 2655
-verdi node delete 2660
-verdi node delete 2665
-verdi node delete 2670
-verdi node delete 2678
-verdi node delete 2683
-verdi node delete 2695
-verdi node delete 2703
-verdi node delete 2704
-verdi node delete 2705
-verdi node delete 2722
-verdi node delete 2730
-verdi node delete 2731
-verdi node delete 2732
-verdi node delete 2749
-verdi node delete 2750
-verdi node delete 2763
-verdi node delete 2764
-verdi node delete 2773
-verdi node delete 2778
-verdi node delete 2790
-verdi node delete 2798
-verdi node delete 2799
-verdi node delete 2800
-verdi node delete 2817
-verdi node delete 2825
-verdi node delete 2826
-verdi node delete 2827
-verdi node delete 2844
-verdi node delete 2852
-verdi node delete 2853
-verdi node delete 2854
-verdi node delete 2871
-verdi node delete 2879
-verdi node delete 2880
-verdi node delete 2881
-verdi node delete 2898
-verdi node delete 2906
-verdi node delete 2907
-verdi node delete 2908
-verdi node delete 2925
-verdi node delete 2933
-verdi node delete 2934
-verdi node delete 2935
-verdi node delete 2952
-verdi node delete 2953
-verdi node delete 2961
-verdi node delete 2966
-verdi node delete 2978
-verdi node delete 2979
-verdi node delete 2990
-verdi node delete 2994
-verdi node delete 2999
-verdi node delete 3011
-verdi node delete 3019
-verdi node delete 3020
-verdi node delete 3021
-verdi node delete 3027
-verdi node delete 3029
-verdi node delete 3032
-verdi node delete 3034
-verdi node delete 3035
-verdi node delete 3037
-verdi node delete 3054
-verdi node delete 3062
-verdi node delete 3063
-verdi node delete 3064
-verdi node delete 3069
-verdi node delete 3071
-verdi node delete 3073
-verdi node delete 3077
-verdi node delete 3079
-verdi node delete 3081
-verdi node delete 3088
-verdi node delete 3090
-verdi node delete 3092
-verdi node delete 3105
-verdi node delete 3106
-verdi node delete 3115
-verdi node delete 3123
-verdi node delete 3124
-verdi node delete 3132
-verdi node delete 3140
-verdi node delete 3141
-verdi node delete 3149
-verdi node delete 3157
-verdi node delete 3158
-verdi node delete 3166
-verdi node delete 3174
-verdi node delete 3175
-verdi node delete 3183
-verdi node delete 3191
-verdi node delete 3192
-verdi node delete 3200
-verdi node delete 3208
-verdi node delete 3209
diff --git a/examples/workflows/example_cp2k_md_reftraj.py b/examples/workflows/example_cp2k_md_reftraj.py
new file mode 100644
index 0000000..d2f7114
--- /dev/null
+++ b/examples/workflows/example_cp2k_md_reftraj.py
@@ -0,0 +1,129 @@
+import os
+
+import ase.io
+import click
+import numpy as np
+from aiida import engine, orm, plugins
+
+Cp2kReftrajWorkChain = plugins.WorkflowFactory("nanotech_empa.cp2k.reftraj")
+StructureData = plugins.DataFactory("core.structure")
+TrajectoryData = plugins.DataFactory("core.array.trajectory")
+
+
+def _example_cp2k_reftraj(cp2k_code, num_batches=2, restart=False):
+    thisdir = os.path.dirname(os.path.realpath(__file__))
+
+    # Structure.
+    # structure = StructureData(ase=ase.io.read(os.path.join(thisdir, ".", "h2.xyz")))
+
+    # Check if the input trajectory is already in the database, otherwise create it.
+    qb = orm.QueryBuilder()
+    qb.append(TrajectoryData, filters={"label": "H2_trajectory"})
+    if qb.count() == 0:
+        steps = 20
+        positions = np.array(
+            [
+                [
+                    [2.528, 3.966, 3.75 + 0.0001 * i],
+                    [2.528, 3.966, 3],
+                ]
+                for i in range(steps)
+            ]
+        )
+        cells = np.array(
+            [[[5, 0, 0], [0, 5, 0], [0, 0, 5 + 0.0001 * i]] for i in range(steps)]
+        )
+        symbols = ["H", "H"]
+        trajectory = TrajectoryData()
+        trajectory.set_trajectory(symbols, positions, cells=cells)
+        trajectory.label = "H2_trajectory"
+        trajectory.store()
+        print("stored trajectory ", trajectory.pk)
+    else:
+        trajectory = qb.first()[0]
+
+    builder = Cp2kReftrajWorkChain.get_builder()
+    # if restart_uuid is not None:
+    #     builder.restart_from = orm.Str(restart_uuid)
+
+    builder.metadata.label = "CP2K_RefTraj"
+    builder.metadata.description = "test description"
+    builder.code = cp2k_code
+    builder.options = {
+        "max_wallclock_seconds": 600,
+        "resources": {
+            "num_machines": 1,
+            "num_mpiprocs_per_machine": 1,
+            "num_cores_per_mpiproc": 1,
+        },
+    }
+
+    # builder.structure = structure
+    builder.trajectory = trajectory
+    if restart:
+        builder.restart = orm.Bool(True)
+    builder.num_batches = orm.Int(num_batches)
+    builder.protocol = orm.Str("debug")
+
+    dft_params = {
+        "uks": True,
+        "magnetization_per_site": [0, 1],
+        "charge": 0,
+        "periodic": "NONE",
+        "vdw": False,
+        "multiplicity": 1,
+        "cutoff": 300,
+    }
+
+    sys_params = {}
+    builder.dft_params = orm.Dict(dft_params)
+    builder.sys_params = orm.Dict(sys_params)
+
+    _, calc_node = engine.run_get_node(builder)
+
+    assert calc_node.is_finished_ok
+    return calc_node.pk
+
+
+def example_cp2k_reftraj(cp2k_code):
+    _example_cp2k_reftraj(cp2k_code)
+    # _example_cp2k_replicachain(cp2k_code, pk1)
+
+
+@click.command("cli")
+@click.argument("cp2k_code", default="cp2k@localhost")
+@click.option("-n", "--n-nodes", default=1)
+@click.option("-c", "--n-cores-per-node", default=1)
+def run_all(cp2k_code, n_nodes, n_cores_per_node):
+    print("#### UKS one batch")
+    pk1 = _example_cp2k_reftraj(cp2k_code=orm.load_code(cp2k_code), num_batches=1)
+    print("#### UKS two batches")
+    pk2 = _example_cp2k_reftraj(
+        cp2k_code=orm.load_code(cp2k_code), num_batches=2, restart=False
+    )
+    print("#### UKS two batches restart")
+    pk3 = _example_cp2k_reftraj(
+        cp2k_code=orm.load_code(cp2k_code), num_batches=2, restart=True
+    )
+    traj1 = orm.load_node(pk1).outputs.output_trajectory
+    traj2 = orm.load_node(pk2).outputs.output_trajectory
+    traj3 = orm.load_node(pk3).outputs.output_trajectory
+    print("#### DONE ####")
+    assert np.allclose(
+        traj1.get_array("cells"),
+        traj2.get_array("cells"),
+        rtol=1e-07,
+        atol=1e-08,
+        equal_nan=False,
+    ) and np.allclose(
+        traj1.get_array("cells"),
+        traj3.get_array("cells"),
+        rtol=1e-07,
+        atol=1e-08,
+        equal_nan=False,
+    )
+    print("arrays match")
+
+
+if __name__ == "__main__":
+    run_all()
diff --git a/pyproject.toml b/pyproject.toml
index 591b8aa..7461fe4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -84,6 +84,7 @@ dev = [
 "nanotech_empa.cp2k.replica" = "aiida_nanotech_empa.workflows.cp2k:Cp2kReplicaWorkChain"
 "nanotech_empa.cp2k.neb" = "aiida_nanotech_empa.workflows.cp2k:Cp2kNebWorkChain"
 "nanotech_empa.cp2k.phonons" = "aiida_nanotech_empa.workflows.cp2k:Cp2kPhononsWorkChain"
+"nanotech_empa.cp2k.reftraj" = "aiida_nanotech_empa.workflows.cp2k:Cp2kRefTrajWorkChain"
 
 [project.entry-points."aiida.schedulers"]
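A minimal driver sketch for the new "nanotech_empa.cp2k.reftraj" entry point, assuming a configured CP2K code and a stored TrajectoryData; the code label "cp2k@localhost", the trajectory pk 1234, and the dft_params/options values are placeholders mirroring examples/workflows/example_cp2k_md_reftraj.py:

    from aiida import engine, orm, plugins

    Cp2kRefTrajWorkChain = plugins.WorkflowFactory("nanotech_empa.cp2k.reftraj")

    builder = Cp2kRefTrajWorkChain.get_builder()
    builder.metadata.label = "CP2K_RefTraj"  # label used by the restart query
    builder.code = orm.load_code("cp2k@localhost")  # placeholder code label
    builder.trajectory = orm.load_node(1234)  # placeholder pk of a stored TrajectoryData
    builder.num_batches = orm.Int(2)
    builder.protocol = orm.Str("standard")
    builder.dft_params = orm.Dict(
        {"uks": False, "charge": 0, "periodic": "XYZ", "vdw": False, "cutoff": 300}
    )  # placeholder values
    builder.sys_params = orm.Dict({})
    builder.options = {
        "max_wallclock_seconds": 3600,
        "resources": {"num_machines": 1, "num_mpiprocs_per_machine": 1},
    }

    _, node = engine.run_get_node(builder)
    print(node.outputs.output_trajectory.get_shape("positions"))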