Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Supplementary unittests for merge_traj function to detect issue #957 #961

Merged
merged 7 commits into from
Dec 3, 2022
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions dpgen/generator/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -2190,8 +2190,11 @@ def _trust_limitation_check(sys_idx, lim):
cwd = os.getcwd()
os.chdir(fp_task_path)
if cluster_cutoff is None:
if model_devi_engine in ("lammps", "gromacs"):
os.symlink(os.path.relpath(conf_name), 'conf.dump')
if model_devi_engine in ("lammps", "gromacs"):
if model_devi_merge_traj:
conf_sys.to("lammps/lmp", "conf.dump")
Chengqian-Zhang marked this conversation as resolved.
Show resolved Hide resolved
else:
os.symlink(os.path.relpath(conf_name), 'conf.dump')
os.symlink(os.path.relpath(job_name), 'job.json')
elif model_devi_engine == "amber":
# read and write with ase
Expand Down Expand Up @@ -2239,7 +2242,7 @@ def _trust_limitation_check(sys_idx, lim):
if model_devi_engine == "lammps":
sys = None
if model_devi_merge_traj:
sys = conf_sys
sys = dpdata.System('conf.dump', fmt = "lammps/lmp", type_map = type_map)
else :
sys = dpdata.System('conf.dump', fmt = "lammps/dump", type_map = type_map)
sys.to_vasp_poscar('POSCAR')
Expand Down
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task0/dir0/test2
Original file line number Diff line number Diff line change
@@ -1 +1 @@
f0c56f70-627b-445a-89de-3ad3e81f3785
140c75e5-993c-4644-b877-cd3ceb2b254a
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task0/test0
Original file line number Diff line number Diff line change
@@ -1 +1 @@
5dc17ca2-0d58-4968-af22-41536e667668
dfea7618-49df-42ac-b723-f7c04e349203
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task0/test1
Original file line number Diff line number Diff line change
@@ -1 +1 @@
814f28b0-38bd-493a-b400-d678c3fe1a0e
99cee2e2-0de4-43ba-a296-805f4e551ace
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task1/dir0/test2
Original file line number Diff line number Diff line change
@@ -1 +1 @@
d3c9fb33-4ffd-48c2-86bb-933fbe7fd512
0d7eaf5f-0a04-492a-b9ae-c7d77781c928
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task1/test0
Original file line number Diff line number Diff line change
@@ -1 +1 @@
8a4f3e52-3ace-45c5-8bd1-1bbbb4d9abd0
b96519be-c495-4150-b634-39b61b54ffd9
2 changes: 1 addition & 1 deletion tests/dispatcher/loc/task1/test1
Original file line number Diff line number Diff line change
@@ -1 +1 @@
b8389a9b-bc75-4498-94bf-c565a4e387b6
00bc5947-dfb6-47e4-909e-3c647b551c82
1 change: 1 addition & 0 deletions tests/generator/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from dpgen.generator.lib.parse_calypso import _parse_calypso_input,_parse_calypso_dis_mtx

param_file = 'param-mg-vasp.json'
param_file_merge_traj = 'param-mg-vasp_merge_traj.json'
param_file_v1 = 'param-mg-vasp-v1.json'
param_file_v1_et = 'param-mg-vasp-v1-et.json'
param_old_file = 'param-mg-vasp-old.json'
Expand Down
108 changes: 108 additions & 0 deletions tests/generator/param-mg-vasp_merge_traj.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
{
"type_map": ["Mg", "Al"],
"mass_map": [24, 27],

"init_data_prefix": "data",
"init_data_sys": ["deepmd"
],
"init_batch_size": [16],
"sys_configs": [
["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/000000/POSCAR"],
["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/000001/POSCAR"]
],
"_comment": "0 1 2 3",
"_comment": "4 5 6 7",
"sys_batch_size": [1, 1
],

"_comment": " 00.train ",
"numb_models": 4,
"default_training_param" : {
"_comment": " model parameters",
"use_smooth": true,
"sel_a": [90],
"rcut_smth": 2.00,
"rcut": 6.00,
"filter_neuron": [25, 50, 100],
"filter_resnet_dt": false,
"n_axis_neuron": 12,
"n_neuron": [240, 240, 240],
"resnet_dt": true,
"coord_norm": true,
"type_fitting_net": false,

"_comment": " training controls",
"systems": [],
"set_prefix": "set",
"stop_batch": 400000,
"batch_size": 1,
"start_lr": 0.002,
"decay_steps": 2000,
"decay_rate": 0.95,
"seed": 0,

"start_pref_e": 0.02,
"limit_pref_e": 2,
"start_pref_f": 1000,
"limit_pref_f": 1,
"start_pref_v": 0.0,
"limit_pref_v": 0.0,

"_comment": " display and restart",
"_comment": " frequencies counted in batch",
"disp_file": "lcurve.out",
"disp_freq": 2000,
"numb_test": 10,
"save_freq": 20000,
"save_ckpt": "model.ckpt",
"load_ckpt": "model.ckpt",
"disp_training": true,
"time_training": true,
"profiling": false,
"profiling_file": "timeline.json",

"_comment": "that's all"
},

"_comment": " 01.model_devi ",
"_comment": "model_devi_skip: the first x of the recorded frames",
"model_devi_dt": 0.002,
"model_devi_skip": 0,
"model_devi_f_trust_lo": 0.050,
"model_devi_f_trust_hi": 0.150,
"model_devi_merge_traj": true,
"model_devi_jobs": [
{"sys_idx": [0,1], "temps": [50,100], "press": [1.0,2.0], "trj_freq": 10, "nsteps": 1000, "ensemble": "npt", "_idx": "00"}
],

"_comment": " 02.fp ",
"fp_style": "vasp",
"shuffle_poscar": false,
"fp_task_max": 100,
"fp_task_min": 10,
"fp_pp_path": ".",
"fp_pp_files": ["vasp/potcars/POTCAR.mg", "vasp/potcars/POTCAR.al"],
"_comment": " user provided vasp script ",
"user_fp_params": {
"PREC": "A",
"ENCUT": 600,
"ISYM": 0,
"ALGO": "fast",
"EDIFF": 1e-05,
"LREAL": "A",
"NPAR": 1,
"KPAR": 1,
"NELMIN": 4,
"ISIF": 2,
"ISMEAR": 1,
"SIGMA": 0.25,
"IBRION": -1,
"NSW": 0,
"LWAVE": false,
"LCHARG": false,
"PSTRESS": 0,
"KSPACING": 0.16,
"KGAMMA": false
},
"_comment": " that's all "
}
116 changes: 114 additions & 2 deletions tests/generator/test_make_fp.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,15 @@
import numpy as np
import unittest
import importlib
import textwrap
Chengqian-Zhang marked this conversation as resolved.
Show resolved Hide resolved

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
__package__ = 'generator'
from .context import make_fp
from .context import detect_multiplicity
from .context import parse_cur_job
from .context import param_file
from .context import param_file_merge_traj
from .context import param_old_file
from .context import param_pwscf_file
from .context import param_pwscf_old_file
Expand Down Expand Up @@ -219,7 +221,7 @@ def _write_lammps_dump(sys, dump_file, f_idx = 0) :
bd, tilt = _box2dumpbox(np.zeros(3), cell)
atype = sys['atom_types']
natoms = len(sys['atom_types'])
with open(dump_file, 'w') as fp:
with open(dump_file, 'a') as fp:
fp.write('ITEM: TIMESTEP\n')
fp.write('0\n')
fp.write('ITEM: NUMBER OF ATOMS\n')
Expand All @@ -231,7 +233,6 @@ def _write_lammps_dump(sys, dump_file, f_idx = 0) :
for ii in range(natoms) :
fp.write('%d %d %f %f %f\n' % (ii+1, atype[ii]+1, coord[ii][0], coord[ii][1], coord[ii][2]))


Chengqian-Zhang marked this conversation as resolved.
Show resolved Hide resolved
def _make_fake_md(idx, md_descript, atom_types, type_map, ele_temp = None) :
"""
md_descript: list of dimension
Expand Down Expand Up @@ -271,6 +272,52 @@ def _make_fake_md(idx, md_descript, atom_types, type_map, ele_temp = None) :
with open(os.path.join(task_dir, 'job.json'), 'w') as fp:
json.dump({"ele_temp": ele_temp[sidx][midx]}, fp)

def _make_fake_md_merge_traj(idx, md_descript, atom_types, type_map, ele_temp = None) :
    """
    Build fake model-devi task directories for the merged-trajectory path.

    md_descript: list of dimension
                 [n_sys][n_MD][n_frame]
    ele_temp: list of dimension
              [n_sys][n_MD]

    For every (sys, MD) pair this writes all frames into a single
    'all.lammpstrj' file (the merged trajectory, appended frame by frame)
    plus a fixed 'model_devi.out' whose max force deviation grows linearly
    with the frame index, so a known subset of frames becomes fp candidates.
    """
    natoms = len(atom_types)
    ntypes = len(type_map)
    atom_types = np.array(atom_types, dtype = int)
    atom_numbs = [np.sum(atom_types == ii) for ii in range(ntypes)]
    sys = dpdata.System()
    sys.data['atom_names'] = type_map
    sys.data['atom_numbs'] = atom_numbs
    sys.data['atom_types'] = atom_types
    for sidx, ss in enumerate(md_descript) :
        for midx, mm in enumerate(ss) :
            nframes = len(mm)
            # Random cells/coords: the test only checks frame bookkeeping,
            # not physical sanity.
            cells = np.random.random([nframes, 3, 3])
            coords = np.random.random([nframes, natoms, 3])
            sys.data['coords'] = coords
            sys.data['cells'] = cells
            task_dir = os.path.join('iter.%06d' % idx,
                                    '01.model_devi',
                                    'task.%03d.%06d' % (sidx, midx))
            os.makedirs(task_dir, exist_ok = True)
            # Append every frame to ONE merged dump file
            # (_write_lammps_dump opens the file in append mode).
            for ii in range(nframes):
                _write_lammps_dump(sys, os.path.join(task_dir, 'all.lammpstrj'), ii)
            # Fixed model-devi record: max force deviation rises linearly
            # (0.0, 0.029, 0.058, ...) every 10 steps over 10 frames.
            file_content = textwrap.dedent("""\
            0.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
            1.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.899999999999999800e-02 0.000000000000000000e+00 0.000000000000000000e+00
            2.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 5.799999999999999600e-02 0.000000000000000000e+00 0.000000000000000000e+00
            3.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 8.699999999999999400e-02 0.000000000000000000e+00 0.000000000000000000e+00
            4.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.159999999999999920e-01 0.000000000000000000e+00 0.000000000000000000e+00
            5.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.449999999999999900e-01 0.000000000000000000e+00 0.000000000000000000e+00
            6.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.739999999999999880e-01 0.000000000000000000e+00 0.000000000000000000e+00
            7.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.029999999999999860e-01 0.000000000000000000e+00 0.000000000000000000e+00
            8.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.319999999999999840e-01 0.000000000000000000e+00 0.000000000000000000e+00
            9.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.610000000000000098e-01 0.000000000000000000e+00 0.000000000000000000e+00
            """)
            with open(os.path.join(task_dir, 'model_devi.out'), 'w') as fp:
                fp.write(file_content)
            if ele_temp is not None:
                with open(os.path.join(task_dir, 'job.json'), 'w') as fp:
                    json.dump({"ele_temp": ele_temp[sidx][midx]}, fp)

def _check_poscars(testCase, idx, fp_task_max, type_map) :
fp_path = os.path.join('iter.%06d' % idx, '02.fp')
Expand All @@ -297,6 +344,40 @@ def _check_poscars(testCase, idx, fp_task_max, type_map) :
sys1 = dpdata.System(poscar_file, fmt = 'vasp/poscar')
test_atom_names(testCase, sys0, sys1)

def _check_poscars_merge_traj(testCase, idx, fp_task_max, type_map) :
    """
    Check fp POSCARs generated from merged trajectories for iteration `idx`.

    Reads each candidate.shuffled.<sys>.out file, maps every
    (task, frame-index) candidate to the POSCAR of the corresponding fp
    task, and verifies that:
      * the POSCAR matches the correct frame of the task's merged
        'all.lammpstrj' (via test_atom_names), and
      * two consecutive fp tasks do not contain the exact same structure
        (guards against issue #957, where every task received frame 0).
    """
    fp_path = os.path.join('iter.%06d' % idx, '02.fp')
    candi_files = sorted(glob.glob(os.path.join(fp_path, 'candidate.shuffled.*.out')))
    sys_idx = [str(os.path.basename(ff).split('.')[2]) for ff in candi_files]
    for sidx, candi_file in zip(sys_idx, candi_files) :
        md_task = []
        f_idx = []
        with open(candi_file) as fp:
            for line in fp :
                words = line.split()
                md_task.append(words[0])
                f_idx.append(words[1])
        md_task = md_task[:fp_task_max]
        f_idx = f_idx[:fp_task_max]
        cc = 0
        # Coordinates of the previously checked POSCAR, used to detect two
        # fp tasks holding identical structures.
        label_0 = 0
        label_1 = 0

        for tt, ff in zip(md_task, f_idx) :
            traj_file = os.path.join(tt, 'all.lammpstrj')
            poscar_file = os.path.join(fp_path,
                                       'task.%03d.%06d' % (int(sidx), cc),
                                       'POSCAR')
            cc += 1
            sys0 = dpdata.System(traj_file, fmt = 'lammps/dump', type_map = type_map)
            sys1 = dpdata.System(poscar_file, fmt = 'vasp/poscar')
            new_coords_0 = float(sys1["coords"][0][0][0])
            new_coords_1 = float(sys1["coords"][0][1][0])
            if (label_0 == new_coords_0 and label_1 == new_coords_1):
                raise RuntimeError("The exact same POSCAR is generated under different first-principles calculation catalogs")
            label_0 = new_coords_0
            label_1 = new_coords_1
            # trj_freq is 10 in the fake data, so candidate step `ff`
            # corresponds to frame ff/10 of the merged trajectory.
            test_atom_names(testCase, sys0[int(int(ff)/10)], sys1)

def _check_kpoints_exists(testCase, idx) :
fp_path = os.path.join('iter.%06d' % idx, '02.fp')
tasks = glob.glob(os.path.join(fp_path, 'task.*'))
Expand Down Expand Up @@ -710,6 +791,37 @@ def test_make_fp_vasp(self):
# checked elsewhere
# _check_potcar(self, 0, jdata['fp_pp_path'], jdata['fp_pp_files'])
shutil.rmtree('iter.000000')

def test_make_fp_vasp_merge_traj(self):
    """make_fp with model_devi_merge_traj: POSCARs must come from the
    merged all.lammpstrj and differ between fp tasks (issue #957)."""
    setUpModule()
    if os.path.isdir('iter.000000') :
        shutil.rmtree('iter.000000')
    with open (param_file_merge_traj, 'r') as fp :
        jdata = json.load (fp)
    with open (machine_file, 'r') as fp:
        mdata = json.load (fp)
    md_descript = []
    nsys = 2
    nmd = 3
    n_frame = 10
    for ii in range(nsys) :
        tmp = []
        for jj in range(nmd) :
            # Max devi grows linearly from 0 to just below 0.29 over
            # n_frame frames, matching the fake model_devi.out content.
            tmp.append(np.arange(0, 0.29, 0.29 / n_frame))
        md_descript.append(tmp)
    atom_types = [0, 1, 0, 1]
    type_map = jdata['type_map']

    _make_fake_md_merge_traj(0, md_descript, atom_types, type_map)
    make_fp(0, jdata, {"fp_user_forward_files" : ["vdw_kernel.bindat"] })
    _check_poscars_merge_traj(self, 0, jdata['fp_task_max'], jdata['type_map'])
    #_check_incar_exists(self, 0)
    _check_incar(self, 0)
    _check_kpoints_exists(self, 0)
    _check_kpoints(self,0)
    # checked elsewhere
    # _check_potcar(self, 0, jdata['fp_pp_path'], jdata['fp_pp_files'])
    shutil.rmtree('iter.000000')

def test_make_fp_vasp_old(self):
setUpModule()
Expand Down