diff --git a/aiida_kkr/tools/tools_STM_scan.py b/aiida_kkr/tools/tools_STM_scan.py
index 217c2fa3..123bf306 100644
--- a/aiida_kkr/tools/tools_STM_scan.py
+++ b/aiida_kkr/tools/tools_STM_scan.py
@@ -6,15 +6,15 @@
 import numpy as np
 from aiida import orm, engine
 from aiida_kkr.tools import find_parent_structure
-from aiida_kkr.tools.imp_cluster_tools import pos_exists_already, combine_clusters, create_combined_imp_info_cf, get_scoef_single_imp
-#from aiida_kkr.tools.combine_imps import get_scoef_single_imp
+from aiida_kkr.tools.combine_imps import get_scoef_single_imp
+from aiida_kkr.tools.imp_cluster_tools import pos_exists_already, combine_clusters
 from masci_tools.io.common_functions import get_alat_from_bravais

 __copyright__ = (u'Copyright (c), 2023, Forschungszentrum Jülich GmbH, '
                  'IAS-1/PGI-1, Germany. All rights reserved.')
 __license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.1.1'
-__contributors__ = (u'Philipp Rüßmann, Raffaele Aliberti')
+__version__ = '0.1.2'
+__contributors__ = (u'Philipp Rüßmann', u'Raffaele Aliberti')

 ##############################################################################
 # combine impurty clusters
@@ -53,8 +53,12 @@ def get_imp_cls_add(host_structure, add_position):
     define auxiliary imp_info for adding position and generate rcls
     """
     Zadd = get_Zadd(host_structure, add_position)
-    imp_info2 = orm.Dict({'ilayer_center': add_position['ilayer'], 'Zimp': [Zadd], 'Rcut': 1e-5})
-    clust2 = get_scoef_single_imp(host_structure, imp_info2)
+    ilayer = add_position['ilayer']
+    imp_info2 = orm.Dict({'ilayer_center': ilayer, 'Zimp': [Zadd], 'Rcut': 1e-5})
+    # old version is too slow:
+    # clust2 = get_scoef_single_imp(host_structure, imp_info2)
+    # new version creates the array without calling the get_scoef_single_imp function:
+    clust2 = np.array([[0., 0., 0., ilayer + 1, 0., 0.]])

     return imp_info2, clust2

@@ -64,8 +68,9 @@ def get_r_offset(clust1, clust2, host_structure, add_position):
     """
     # calculate out-of plane vector from the ilayer indices of the two clusters
     r_out_of_plane = np.array([0., 0., 0.])
-    ilayer1 = int(clust1[0, 3])
-    ilayer2 = int(clust2[0, 3])
+    # minus 1 because of conversion from fortran to python standard (counting starts at 0)
+    ilayer1 = int(clust1[0, 3]) - 1
+    ilayer2 = int(clust2[0, 3]) - 1
     if ilayer1 != ilayer2:
         pos1 = np.array(host_structure.sites[ilayer1].position)
         pos2 = np.array(host_structure.sites[ilayer2].position)
@@ -98,7 +103,7 @@ def offset_clust2(clust1, clust2, host_structure, add_position):
     """
     Compute and add offset to clust2
     """
-    r_offset = get_r_offset(clust1, clust2, host_structure, orm.Dict(add_position))
+    r_offset = get_r_offset(clust1, clust2, host_structure, add_position)

     clust2_offset = clust2.copy()
     clust2_offset[:, :3] += r_offset
@@ -188,7 +193,14 @@ def add_host_potential_to_imp(add_position, host_calc, imp_potential_node):
     combine host potential with impurity potential
     """
     # get add potential from host
-    pot_add = extract_host_potential(add_position, host_calc)
+    potname = f'host_pot:{add_position["ilayer"]}'
+    if potname in imp_potential_node.extras:
+        # reuse existing host position if we have found it previously
+        pot_add = imp_potential_node.extras[potname]
+    else:
+        # get host position and store as extra
+        pot_add = extract_host_potential(add_position, host_calc)
+        imp_potential_node.set_extra(potname, pot_add)

     # get impurity potential and convert to list
     pot_imp = imp_potential_node.get_content().split('\n')
@@ -234,26 +246,16 @@ def create_combined_potential_node_cf(add_position, host_calc, imp_potential_node):

 # STM pathfinder
-def STM_pathfinder(host_structure):
-    #from aiida_kkr.tools import find_parent_structure
-    from ase.spacegroup import Spacegroup
+def STM_pathfinder(host_remote):
     """This function is used to help visualize the scanned positions
-       and the symmetries that are present in the system """
-    """
-<<<<<<< HEAD
+       and the symmetries that are present in the system
+
     inputs::
-        host_struture : RemoteData : The Remote data contains all the information needed to create the path to scan
+        host_remote : RemoteData : The Remote data contains all the information needed to create the path to scan

     outputs::
-=======
-    inputs:
-        host_struture : RemoteData : The Remote data contains all the information needed to create the path to scan
-
-    outputs:
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         struc_info : Dict : Dictionary containing the structural information of the film
         matrices : Array : Array containing the matrices that generate the symmetries of the system
-
     """

     def info_creation(structure):
@@ -268,12 +270,13 @@ def info_creation(structure):
             if vec[2] == 0:
                 plane_vectors['plane_vectors'].append(vec[:2])

-        space_symmetry = get_spacegroup(ase_struc)
+        space_symmetry = get_spacegroup(structure)
         plane_vectors['space_group'] = space_symmetry.no

         return plane_vectors

     def symmetry_finder(struc_info):
+        from ase.spacegroup import Spacegroup
         # Here we get the symmetry operations that are possible
         symmetry_matrices = Spacegroup(struc_info['space_group'])

@@ -290,12 +293,21 @@ def symmetry_finder(struc_info):

         return unique_matrices

-    struc = find_parent_structure(host_structure)
+    struc = find_parent_structure(host_remote)

     # clone the structure since it has already been saved in AiiDA and cannot be modified
     supp_struc = struc.clone()

     # If the structure is not periodic in every direction we force it to be.
-    supp_struc.pbc = (True, True, True)
+    if not supp_struc.pbc[2]:
+        # find film thickness
+        zs = np.array([i.position[2] for i in supp_struc.sites])
+        z = zs.max() - zs.min() + 5  # add 5 to have a unit cell larger than the considered film thickness
+        # set third bravais vector along z direction
+        cell = supp_struc.cell
+        cell[2] = [0, 0, z]
+        supp_struc.set_cell(cell)
+        # change periodic boundary conditions to periodic
+        supp_struc.pbc = (True, True, True)

     # ASE struc
     ase_struc = supp_struc.get_ase()
@@ -348,14 +360,8 @@ def lattice_generation(x_len, y_len, rot, vec):

     for i in range(-x_len, x_len + 1):
         for j in range(-y_len, y_len + 1):
-<<<<<<< HEAD
-            if (
-                (lattice_points[i][j][0] > 0 or math.isclose(lattice_points[i][j][0],0, abs_tol=1e-3)) and
-                (lattice_points[i][j][1] > 0 or math.isclose(lattice_points[i][j][1],0, abs_tol=1e-3))
-            ):
-=======
-            if lattice_points[i][j][0] >= 0 and lattice_points[i][j][1] >= 0:
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
+            if ((lattice_points[i][j][0] > 0 or math.isclose(lattice_points[i][j][0], 0, abs_tol=1e-3)) and
+                (lattice_points[i][j][1] > 0 or math.isclose(lattice_points[i][j][1], 0, abs_tol=1e-3))):
                 for element in rot[1:]:
                     point = np.dot(element, lattice_points[i][j])
                     if point[0] >= 0 and point[1] >= 0:
diff --git a/aiida_kkr/workflows/kkr_STM.py b/aiida_kkr/workflows/kkr_STM.py
index c80cd461..46d78f9c 100644
--- a/aiida_kkr/workflows/kkr_STM.py
+++ b/aiida_kkr/workflows/kkr_STM.py
@@ -13,7 +13,7 @@
 __copyright__ = (u'Copyright (c), 2024, Forschungszentrum Jülich GmbH, '
                  'IAS-1/PGI-1, Germany. All rights reserved.')
 __license__ = 'MIT license, see LICENSE.txt file'
-__version__ = '0.1.0'
+__version__ = '0.1.1'
 __contributors__ = (u'Raffaele Aliberti', u'David Antognini Silva', u'Philipp Rüßmann')

 _VERBOSE_ = True
@@ -32,11 +32,8 @@ class kkr_STM_wc(WorkChain):
     :param wf_parameters: (Dict), parameters that are used to run the workflow
     :param kkr: (Code), KKR host code for the writing out kkrflex files
     :param kkrimp: (Code), KKR impurity code for the normal state impurity scf and BdG impurity DOS calculation
-<<<<<<< HEAD
     :param gf_writeout.params_kkr_overwrite (Dict), overwrite parameters for the GF calculation
     :param kkr_imp_sub.params_kkr_overwrite (Dict), overwrite parameters for the impurity calculation
-=======
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12

     returns::
@@ -84,13 +81,7 @@ def define(cls, spec):
         super(kkr_STM_wc, cls).define(spec)

         spec.input('kkr', valid_type=Code, required=False, help='KKRhost code, needed if gf_dos_remote is not given.')
-<<<<<<< HEAD
         spec.input('kkrimp', valid_type=Code, required=True, help='KKRimp code, always needed.')
-=======
-
-        spec.input('kkrimp', valid_type=Code, required=True, help='KKRimp code, always needed.')
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'options',
             valid_type=Dict,
@@ -98,10 +89,6 @@ def define(cls, spec):
             default=lambda: Dict(dict=cls._options_default),
             help='Computer options (resources, quene name, etc.).'
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'wf_parameters',
             valid_type=Dict,
@@ -109,10 +96,6 @@ def define(cls, spec):
             default=lambda: Dict(dict=cls._wf_default),
             help='Workflow parameter (see `kkr_dos_wc.get_wf_defaults()`).'
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'tip_position',
             valid_type=Dict,
@@ -127,25 +110,15 @@ def define(cls, spec):
             # nx is the number of (symmetric) steps that we take in the x-direction starting from the impurity
             # ny is the number of (symmetric) steps that we take in the y-direction starting from the impurity
             # (0,0) correspond to calculate the DOS only on the impurity site
-<<<<<<< HEAD
             help=
             'How many sites will be scanned in the da and db direction (Bravais Lattice). And the layer that is being scanned.'
         )
-=======
-            help='How many sites will be scanned in the x and y direction. And the layer that is being scanned.'
-        )
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'imp_info',
             valid_type=Dict,
             required=True,
             help='Information of the impurity like position in the unit cell, screening cluster, atom type.'
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'host_calc',
             valid_type=RemoteData,
@@ -153,40 +126,24 @@ def define(cls, spec):
             help='The information about the clean host structure is required in order to continue the cluster'
             'Inside a bigger host structure with empty sites.'
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'host_remote',
             valid_type=RemoteData,
             required=True,
             help='Remote Data containing the remote folder from the outputs of the host calculation',
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'imp_potential_node',
             valid_type=SinglefileData,
             required=True,
             help='Impurity potential node',
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'remote_data',
             valid_type=RemoteData,
             required=False,
             help='Remote data from a converged kkr calculation, required for the gf writeout step',
         )
-<<<<<<< HEAD
-=======
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.input(
             'kkrflex_files',
             valid_type=RemoteData,
@@ -196,9 +153,8 @@ def define(cls, spec):

         # Here we expose the inputs for the GF calculations step.
         # One parameter which is crucial is the NSHELD, which determines the impurity cluster radius.
-        spec.expose_inputs(kkr_flex_wc, namespace='gf_writeout', include=('params_kkr_overwrite'))
+        spec.expose_inputs(kkr_flex_wc, namespace='gf_writeout', include=('params_kkr_overwrite', 'options'))

-<<<<<<< HEAD
         # Here we expose the BdG calculations from the kkr_imp_dos_wc
         spec.expose_inputs(kkr_imp_sub_wc, namespace='BdG', include=('params_overwrite'))
         spec.expose_inputs(kkr_imp_sub_wc, include=('initial_noco_angles', 'rimpshift'))
@@ -210,21 +166,6 @@ def define(cls, spec):
         #spec.output('workflow_info', valid_type=Dict)
         spec.output('kkrflexfiles', valid_type=RemoteData)
         spec.output('combined_imp_info', valid_type=Dict)
-=======
-        # Specify the possible outputs
-        spec.output('tip_position', valid_type=Dict)
-
-        spec.output('STM_dos_data', valid_type=XyData, required=True)
-
-        spec.output('STM_dos_data_lmdos', valid_type=XyData, required=True)
-
-        #spec.output('workflow_info', valid_type=Dict)
-
-        spec.output('kkrflexfiles', valid_type=RemoteData)
-
-        spec.output('combined_imp_info', valid_type=Dict)
-
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         spec.output('combined_imp_potential', valid_type=SinglefileData)

         # Define all possible error messages
@@ -234,9 +175,8 @@ def define(cls, spec):
         spec.exit_code(102, 'ERROR_NO_IMP_POT_SFD', 'No impurity node has been given in the intput')
         spec.exit_code(103, 'ERROR_NO_IMPURITY_INFO', 'No impurity info has been given in the input')
         spec.exit_code(
-            104, 'ERROR_NO_DATA_FOR_THE_GF_STEP',
-            """Neither the kkrflex files nor the KKR builder have been given. Please
-             provide already converged kkrflex files, or the kkr builder to evaluate them"""
+            104, 'ERROR_NO_DATA_FOR_THE_GF_STEP', """Neither the kkrflex files nor the KKR builder have been given.
+Please provide already converged kkrflex files, or the kkr builder to evaluate them"""
         )

         spec.exit_code(201, 'ERROR_IMP_SUB_WORKFLOW_FAILURE', 'A step in the kkr_imp_dos workflow has failed')

@@ -245,44 +185,24 @@
             cls.start,
             # We first aggregate all the impurity data
             # The gf is then used to evaluate the STM lmdos
-            #cls.gf_writeout_run,
             cls.STM_lmdos_run,
-            # Data aggregator, used to make the final result more user friendly
-            # cls.finalize_results,
             cls.results
         )

-<<<<<<< HEAD
     def combine_potentials(self, host_structure, impurity_to_combine, da, db):
         from aiida_kkr.tools.tools_STM_scan import get_imp_info_add_position
         import numpy as np  # TO DO: optimize this call, only need append from numpy
         """
         Here we want to combine the impurity information and the host information
         """
-=======
-    def combine_potentials(self, impurity_to_combine, da, db):
-        from aiida_kkr.tools.tools_STM_scan import get_imp_info_add_position_cf
-        """
-        Here we want to combine the impurity information and the host information
-        """
-
-        imp_info = self.inputs.imp_info  #(impurity to combine)
-        host_remote = self.inputs.host_remote
-
-        # Since the objects in AiiDA are immutable we have to create a new dictionary and then convert
-        # it to the right AiiDA type
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         tip_position = {}

         tip_position['ilayer'] = self.inputs.tip_position['ilayer']
         tip_position['da'] = da
         tip_position['db'] = db
-<<<<<<< HEAD
-        #print(impurity_to_combine.get_dict())

         imp_info = self.inputs.imp_info  #(impurity to combine)
-        #host_remote = self.inputs.host_remote

-        combined_imp_info = get_imp_info_add_position(tip_position, host_structure, imp_info)
+        combined_imp_info = get_imp_info_add_position(Dict(tip_position), host_structure, imp_info)

         # Since the objects in AiiDA are immutable we have to create a new dictionary and then convert
         # it to the right AiiDA type
@@ -291,51 +211,13 @@ def combine_potentials(self, impurity_to_combine, da, db):

         if 'imp_cls' in impurity_to_combine:

             for key in impurity_to_combine.keys():
-=======
-
-        combined_imp_info = get_imp_info_add_position_cf(tip_position, host_remote, imp_info)
-        # Add check to see if imp_cls is there
-        if 'imp_cls' in impurity_to_combine:
-
-            new_combined_imp_info = {}
-
-            for key, value in impurity_to_combine.items():
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
                 if key == 'Zimp':
-                    #print(impurity_to_combine[key])
                     impurity_to_combine[key].append(combined_imp_info[key][-1])
-                #if key == 'Rimp_rel':
-                #    np.append(impurity_to_combine[key], [combined_imp_info[key][-1]], axis=0)
-                #    print(impurity_to_combine[key])
                 else:
-<<<<<<< HEAD
-                    #print(combined_imp_info[key][-1])
                     impurity_to_combine[key] = np.append(impurity_to_combine[key], [combined_imp_info[key][-1]], axis=0)
-                    #print(impurity_to_combine[key])
-
-                    #new_combined_imp_info[
-
-                    #new_combined_imp_info[key] = impurity_to_combine[key].tolist()
-                    #new_combined_imp_info[key].append(combined_imp_info[key][-1].tolist())
-                    #new_combined_imp_info[key] = new_combined_imp_info[key].tolist()
-                #else:
-                #    # Here we have lists of list that we need to confront
-                #    new_combined_imp_info[key] = impurity_to_combine[key]
-                #    set_tmp = [set(row) for row in impurity_to_combine[key]]
-                #
-                #    new_combined_imp_info[key] += [row for row in combined_imp_info[key] if set(row) not in set_tmp]

             # Convert to an AiiDA Dictionary
             new_combined_imp_info = impurity_to_combine
-=======
-                    # Here we have lists of list that we need to confront
-                    new_combined_imp_info[key] = impurity_to_combine[key]
-                    set_tmp = [set(row) for row in impurity_to_combine[key]]
-
-                    new_combined_imp_info[key] += [row for row in combined_imp_info[key] if set(row) not in set_tmp]
-
-            new_combined_imp_info = orm.Dict(dict=new_combined_imp_info)
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12

         else:

@@ -343,42 +225,22 @@

         return new_combined_imp_info

-<<<<<<< HEAD
     def combine_nodes(self, host_calc, node_to_combine, da, db):
         from aiida_kkr.tools.tools_STM_scan import create_combined_potential_node
-=======
-    def combine_nodes(self, node_to_combine, da, db):
-        from aiida_kkr.tools.tools_STM_scan import create_combined_potential_node_cf
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
         """
         Here we create a combined potential node from the host potential (no impurity)
        and from the impurity potential
         """
-        #imp_potential_node = self.inputs.imp_potential_node # (node_to_combine).
-<<<<<<< HEAD
-        #host_remote = self.inputs.host_remote # the remote host structure remains the same.
-=======
-        host_remote = self.inputs.host_remote  # the remote host structure remains the same.
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12

         # Since the objects in AiiDA are immutable we have to create a new dictionary and then convert
         # it to the right AiiDA type
-
         tip_position = {}
-<<<<<<< HEAD
-        tip_position['ilayer'] = self.inputs.tip_position['ilayer']  # for now we require that the z position remains the same.
+        # for now we require that the z position remains the same.
+        tip_position['ilayer'] = self.inputs.tip_position['ilayer']
         tip_position['da'] = da
         tip_position['db'] = db

         combined_node = create_combined_potential_node(tip_position, host_calc, node_to_combine)
-=======
-        tip_position['ilayer'] = self.inputs.tip_position['ilayer'
-                                                          ]  # for now we require that the z position remains the same.
-        tip_position['da'] = da
-        tip_position['db'] = db
-
-        combined_node = create_combined_potential_node_cf(tip_position, host_remote, node_to_combine)
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12

         return combined_node

     def start(self):
@@ -408,7 +270,6 @@ def start(self):
         self.report('INFO: usign defalut wf parameters')

         # In this section we assign the computational resources to the builder
-
         self.ctx.withmpi = options_dict.get('withmpi', self._options_default['withmpi'])
         self.ctx.resources = options_dict.get('resources', self._options_default['resources'])
         self.ctx.max_wallclock_seconds = options_dict.get(
@@ -427,7 +288,7 @@ def start(self):
         })

         # Set workflow parameters for the KKR imputrity calculations
-        """This part is really important, this should always be set to True for an STM calculation"""
+        # This part is really important, this should always be set to True for an STM calculation
         self.ctx.lmdos = wf_param_dict.get('lmdos', self._wf_default['lmdos'])

         self.ctx.retrieve_kkrflex = wf_param_dict.get('retrieve_kkrflex', self._wf_default['retrieve_kkrflex'])
@@ -446,91 +307,123 @@ def start(self):
         self.ctx.description_wf = self.inputs.get('description', self._wf_description)
         self.ctx.label_wf = self.inputs.get('label', self._wf_label)

-        message = f"""
-        INFO: use the following parameter:
-        withmpi: {self.ctx.withmpi}
-        Resources: {self.ctx.resources}
-        Walltime (s): {self.ctx.max_wallclock_seconds}
-        queue name: {self.ctx.queue}
-        scheduler command: {self.ctx.custom_scheduler_commands}
-        description: {self.ctx.description_wf}
-        label: {self.ctx.label_wf}
-        """
-        print(message)
-        self.report(message)
-
-        # return para/vars
-        self.ctx.successful = True
-        self.ctx.errors = []
-        self.ctx.formula = ''
+        if _VERBOSE_:
+            message = f"""
+INFO: use the following parameter:
+withmpi: {self.ctx.withmpi}
+Resources: {self.ctx.resources}
+Walltime (s): {self.ctx.max_wallclock_seconds}
+queue name: {self.ctx.queue}
+scheduler command: {self.ctx.custom_scheduler_commands}
+description: {self.ctx.description_wf}
+label: {self.ctx.label_wf}
+            """
+            self.report(message)

     def validate_input(self):
-
+        """Check if inputs are valid"""
         inputs = self.inputs
-        inputs_ok = True
-        gf_writeout_calc = None

         if not 'imp_potential_node' in inputs:
-            inputs_ok = False
-            return self.exit_codes.ERROR_NO_IMP_POT_SFD
+            return self.exit_codes.ERROR_NO_IMP_POT_SFD  # pylint: disable=no-member

         if not 'imp_info' in inputs:
-            inputs_ok = False
-            return self.exit_codes.ERROR_NO_IMP_INFO
+            return self.exit_codes.ERROR_NO_IMP_INFO  # pylint: disable=no-member

         if not 'kkrflex_files' and 'kkr' in inputs:
-            inputs_ok = False
-            return self.exit_codes.ERROR_NO_DATA_FOR_THE_GF_STEP
+            return self.exit_codes.ERROR_NO_DATA_FOR_THE_GF_STEP  # pylint: disable=no-member

     def impurity_cluster_evaluation(self):
-        from aiida_kkr.tools import tools_STM_scan
-<<<<<<< HEAD
+        """
+        Create the combined impurity cluster and potential for the impurity region
+        used in self-consistency + the additional scanning sites.
+        """
         from aiida_kkr.tools import find_parent_structure
-=======
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
+        if _VERBOSE_:
+            from time import time

-        # Here we create an impurity cluster that has inside all the positions on which the STM will scan
+            # measure time at start
+            t_start = time()

-        # We now want to iterate over several in-plane positions.
-        # These are the number of vectors in which we want to move the STM tip.
-        x = self.inputs.tip_position['nx']
-        y = self.inputs.tip_position['ny']

+        # Here we create an impurity cluster that has inside all the positions on which the STM will scan
         impurity_info = self.inputs.imp_info  # for the first step we combine the impurity info from the input
         imp_potential_node = self.inputs.imp_potential_node  # for the first step we combine the impurity node from the input
-<<<<<<< HEAD

         host_remote = self.inputs.host_remote
         host_calc = host_remote.get_incoming(node_class=CalcJobNode).first().node
         host_structure = find_parent_structure(host_remote)
-=======
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12

-        # Information of the host structure
-        struc_info, symm_matrices = tools_STM_scan.STM_pathfinder(host_remote)
-
-        # Path creation step. (The the identity operator is present, but will be excluded)
-        unused_pos, used_pos = tools_STM_scan.lattice_generation(x, y, symm_matrices, struc_info['plane_vectors'])
+        # now find all the positions we need to scan
+        coeff = self.get_scanning_positions(host_remote)

-        # Since the combine tools use the element already in the units of da and db, we use a helper function
-        # to have the indices of the linear combination of the used position vectors in the base of the Bravais lattice.
-
-        coeff = tools_STM_scan.find_linear_combination_coefficients(struc_info['plane_vectors'], used_pos)
+        if _VERBOSE_:
+            # timing counters
+            t_imp_info, t_pot = 0., 0.

+        # construct impurity potential and imp_info for the impurity cluster + scanning area
         for element in coeff:
-<<<<<<< HEAD
+            if _VERBOSE_:
+                t0 = time()

             tmp_imp_info = self.combine_potentials(host_structure, impurity_info, element[0], element[1])
-=======
-            tmp_imp_info = self.combine_potentials(impurity_info, element[0], element[1])
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
             impurity_info = tmp_imp_info
+            if _VERBOSE_:
+                t_imp_info += time() - t0
+                t0 = time()

             # Aggregation the impurity nodes
             tmp_imp_pot = self.combine_nodes(host_calc, imp_potential_node, element[0], element[1])
             imp_potential_node = tmp_imp_pot
+            if _VERBOSE_:
+                t_pot += time() - t0
+
+        if _VERBOSE_:
+            # report elapsed time for cluster generation
+            self.report(f'time for cluster generation (s): {time()-t_start}, imp_info={t_imp_info}, pot={t_pot}')

         return impurity_info, imp_potential_node

+    def get_scanning_positions(self, host_remote):
+        """
+        Extract scanning positions either from input 'scan_positions' or from 'nx', 'ny' + symmetry analysis
+
+        If 'scan_positions' is found in the 'tip_position' input dict we use these positions, which should
+        be a 2D array of integers with the positions in units of the structure's in-plane Bravais matrix.
+
+        Otherwise we use the 'nx', 'ny' input to define a scanning region where an automated symmetry
+        analysis is done to reduce the scanning area to the irreducible part.
+        """
+        from aiida_kkr.tools import tools_STM_scan
+
+        generate_scan_positions = True
+        if 'scan_positions' in self.inputs.tip_position:
+            coeff = self.inputs.tip_position['scan_positions']
+            if coeff is not None:
+                # check if coefficients exist and are valid
+                # TODO: improve the validity check
+                generate_scan_positions = False
+
+        if generate_scan_positions:
+
+            # Information of the host structure
+            struc_info, symm_matrices = tools_STM_scan.STM_pathfinder(host_remote)
+
+            # We now want to iterate over several in-plane positions.
+            # These are the number of vectors in which we want to move the STM tip.
+            x = self.inputs.tip_position['nx']
+            y = self.inputs.tip_position['ny']
+
+            # Path creation step. (The identity operator is present, but will be excluded)
+            unused_pos, used_pos = tools_STM_scan.lattice_generation(x, y, symm_matrices, struc_info['plane_vectors'])
+
+            # Since the combine tools use the element already in the units of da and db, we use a helper function
+            # to have the indices of the linear combination of the used position vectors in the basis of the Bravais lattice.
+            coeff = tools_STM_scan.find_linear_combination_coefficients(struc_info['plane_vectors'], used_pos)
+
+        return coeff
+
     def STM_lmdos_run(self):
         """In this part of the worflow we want to simulate the lmdos which a STM is able to measure """

@@ -546,6 +439,8 @@ def STM_lmdos_run(self):
         # Check if the kkrflex files are already given in the outputs
         if 'kkrflex_files' in self.inputs:
             builder.gf_dos_remote = self.inputs.kkrflex_files
+            message = f'Remote kkrflex files are given in the inputs from the node: {self.inputs.kkrflex_files}'
+            self.report(message)
         else:
             builder.kkr = self.inputs.kkr  # needed to evaluate the kkr_flex files in the DOS step

@@ -553,24 +448,24 @@ def STM_lmdos_run(self):
         # The bigger the scanning position, the greater it must be set.
         if 'gf_writeout' in self.inputs:
             if 'params_kkr_overwrite' in self.inputs.gf_writeout:
-                builder.gf_writeout.params_kkr_overwrite = self.inputs.gf_writeout.params_kkr_overwrite
+                builder.gf_writeout.params_kkr_overwrite = self.inputs.gf_writeout.params_kkr_overwrite  # pylint: disable=no-member
+            if 'options' in self.inputs.gf_writeout:
+                builder.gf_writeout.options = self.inputs.gf_writeout.options  # pylint: disable=no-member
         else:
             # This is a big value of NSHELD to make sure that most calculations work
-            builder.gf_writeout.params_kkr_overwrite = Dict(dict={'NSHELD': 1500})
-<<<<<<< HEAD
-
+            builder.gf_writeout.params_kkr_overwrite = Dict(dict={'NSHELD': 1500})  # pylint: disable=no-member
+
         # Update the BdG parameters if they are inserted in the workflow
         if 'BdG' in self.inputs:
             if 'params_kkr_overwrite' in self.inputs.BdG:
-                builder.BdG.params_overwrite = self.inputs.BdG.params_kkr_overwrite
-=======
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
+                builder.BdG.params_overwrite = self.inputs.BdG.params_kkr_overwrite  # pylint: disable=no-member

         self.ctx.kkrimp_params_dict = Dict(
             dict={
-                'nsteps': 1,
-                'kkr_runmax': 1,
-                'dos_run': True,
+                'nsteps': 1,  # redundant because this is already set inside the kkr_imp_dos workchain?!
+                'kkr_runmax': 1,  # redundant because this is already set inside the kkr_imp_dos workchain?!
+                'dos_run': True,  # redundant because this is already set inside the kkr_imp_dos workchain?!
+                'retrieve_kkrflex': self.ctx.retrieve_kkrflex,
                 'lmdos': self.ctx.lmdos,
                 'jij_run': self.ctx.jij_run,
                 'dos_params': self.ctx.dos_params_dict
@@ -578,22 +473,16 @@
         )

         # We want to set the energy to the Fermi level
-
-        self.ctx.kkrimp_params_dict['dos_params']['emin'] = 0 - 0.005
-        self.ctx.kkrimp_params_dict['dos_params']['emax'] = 0 + 0.005
+        if 'emin' not in self.ctx.dos_params_dict:
+            self.ctx.kkrimp_params_dict['dos_params']['emin'] = 0 - 0.005
+        if 'emax' not in self.ctx.dos_params_dict:
+            self.ctx.kkrimp_params_dict['dos_params']['emax'] = 0 + 0.005

         # Finally we overwrite the number of energy points to 1
         # This is because we want many epoints around the impurity position
-
-<<<<<<< HEAD
-        self.ctx.kkrimp_params_dict['dos_params']['nepts'] = 7  # Here 7 because of the interpolated files that aren't generated
-=======
-        self.ctx.kkrimp_params_dict['dos_params'][
-            'nepts'] = 7  # Here 7 because of the interpolated files that aren't generated
->>>>>>> deeb2f88313d293946f7657d00f7dcac0bc22d12
-
-        #builder.metadata.label = label_imp # pylint: disable=no-member
-        #builder.metadata.description = description_imp # pylint: disable=no-member
+        if 'nepts' not in self.ctx.dos_params_dict:
+            self.ctx.kkrimp_params_dict['dos_params'][
+                'nepts'] = 7  # Here 7 because of the interpolated files that aren't generated

         builder.wf_parameters = self.ctx.kkrimp_params_dict
         # Host remote files that will be used for the actual plot step.
@@ -606,14 +495,14 @@
         builder.impurity_info = impurity_info
         builder.imp_pot_sfd = imp_pot_sfd

-        x = self.inputs.tip_position['nx']
-        y = self.inputs.tip_position['ny']
-
+        # submit calculation
         calc = self.submit(builder)
-        message = f"""INFO: running DOS step for an STM measurement (pk: {calc.pk}) at position
-                   (ilayer: {self.inputs.tip_position['ilayer']}, da: {x}, db: {y} )"""
-        print(message)
+        # print report
+        message = f"""INFO: running DOS step for an STM measurement (pk: {calc.pk}) at position (ilayer: {self.inputs.tip_position['ilayer']})"""
+        if 'params_kkr_overwrite' in self.inputs.BdG:
+            if self.inputs.BdG.params_kkr_overwrite:
+                message += f'\nINFO: running DOS step (pk: {calc.pk}), BdG is present'
         self.report(message)

         # Save the calculated impurity cluster and impurity info in the context
@@ -623,32 +512,22 @@

         return ToContext(STM_data=calc)

     def results(self):
+        """Collect results and return output nodes"""

         if not self.ctx.STM_data.is_finished_ok:
-
-            message = 'ERROR: sub workflow for STM calculation failed'
-            print(message)
-            self.report(message)
-            return self.exit_codes.ERROR_IMP_SUB_WORKFLOW_FAILURE
-
+            self.report('ERROR: sub workflow for STM calculation failed')
+            return self.exit_codes.ERROR_IMP_SUB_WORKFLOW_FAILURE  # pylint: disable=no-member
         else:
-            # Declaring the output
             self.out('STM_dos_data', self.ctx.STM_data.outputs.dos_data)
             self.out('STM_dos_data_lmdos', self.ctx.STM_data.outputs.dos_data_lm)
-            #self.out("workflow_info", self.ctx.STM_lmdos.outputs.workflow_info)
             self.out('tip_position', self.inputs.tip_position)
-            try:
+            if 'gf_dos_remote' in self.ctx.STM_data.outputs:
                 self.out('kkrflexfiles', self.ctx.STM_data.outputs.gf_dos_remote)
-            except:
-                pass
-
             self.out('combined_imp_info', self.ctx.impurity_info)
             self.out('combined_imp_potential', self.ctx.imp_pot_sfd)

-            message = 'INFO: created output nodes for KKR STM workflow.'
-            print(message)
-            self.report(message)
+            self.report('INFO: created output nodes for KKR STM workflow.')

         self.report(
             '\n'
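
A minimal usage sketch of the new 'scan_positions' shortcut handled by get_scanning_positions, under the assumption that suitable nodes already exist in the local AiiDA profile; the code label and node identifiers below are placeholders, and only the inputs relevant to the scanning grid are shown.

from aiida import orm, load_profile
from aiida_kkr.workflows.kkr_STM import kkr_STM_wc

load_profile()

builder = kkr_STM_wc.get_builder()
builder.kkrimp = orm.load_code('kkrimp@localhost')  # placeholder code label
builder.host_remote = orm.load_node('<host-remote-uuid>')  # placeholder: RemoteData of the converged host calculation
builder.imp_info = orm.load_node('<imp-info-uuid>')  # placeholder: Dict with the impurity cluster information
builder.imp_potential_node = orm.load_node('<imp-pot-uuid>')  # placeholder: SinglefileData impurity potential

# Explicit scanning positions in units of the in-plane Bravais vectors (da, db).
# If 'scan_positions' were omitted or None, the nx/ny values together with the
# symmetry analysis of STM_pathfinder would generate the positions instead.
builder.tip_position = orm.Dict({
    'ilayer': 0,
    'nx': 2,
    'ny': 2,
    'scan_positions': [[0, 0], [1, 0], [0, 1], [1, 1]],
})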