Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit 1f2abbd

Browse files
committed May 25, 2024
Add get_slurm_template
1 parent 4245e99 commit 1f2abbd

File tree

2 files changed

+113
-10
lines changed

2 files changed

+113
-10
lines changed
 

‎abipy/flowtk/qutils.py

Lines changed: 64 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ def slurm_sbatch(slurm_filepath: PathLike) -> int:
271271
"""
272272
Submit a job script to the queue with sbatch. Return Slurm JOB ID.
273273
"""
274-
from monty.os import cd
274+
from monty.os import cd
275275
dirpath = os.path.dirname(slurm_filepath)
276276
print("dirpath", dirpath)
277277
with cd(dirpath):
@@ -299,3 +299,66 @@ def slurm_sbatch(slurm_filepath: PathLike) -> int:
299299
raise exc
300300
else:
301301
raise RuntimeError(f"Error while submitting {slurm_filepath=}")
302+
303+
304+
def get_slurm_template() -> str:
    """
    Return a string with a generic Slurm submission script.

    The returned script is only a starting point: the user is expected to edit
    the #SBATCH directives (job name, partition, account, resources) as well as
    the environment-setup and execution sections before submitting it.
    """
    script = """\
#!/bin/bash

# Please customize this section using your settings.
#SBATCH --job-name=my_job
#SBATCH --output=%j_%x.out
#SBATCH --error=%j_%x.err
#SBATCH --partition=debug
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=64
#SBATCH --mem-per-cpu=2G
#SBATCH --time=2:00:00
#SBATCH --account=htforft

# ------------------------------------------------------------------------------
# Printing some information
# ------------------------------------------------------------------------------

echo "------------------- Job info -------------------"
echo "job_id : $SLURM_JOB_ID"
echo "jobname : $SLURM_JOB_NAME"
echo "queue : $SLURM_JOB_PARTITION"
echo "qos : $SLURM_JOB_QOS"
echo "account : $SLURM_JOB_ACCOUNT"
echo "submit dir : $SLURM_SUBMIT_DIR"
echo "number of mpi tasks: $SLURM_NTASKS tasks"
echo "OMP_NUM_THREADS : $OMP_NUM_THREADS"
echo "number of gpus : $SLURM_GPUS_ON_NODE"

echo "------------------- Node list ------------------"
echo $SLURM_JOB_NODELIST

echo "---------------- Checking limits ---------------"
ulimit -a

# ------------------------------------------------------------------------------
# Setting up the environment
# ------------------------------------------------------------------------------

echo "----------------- Environment ------------------"
source $HOME/vasp.6.2.1/modules.sh
module list

# ------------------------------------------------------------------------------
# And finally running the code
# ------------------------------------------------------------------------------

echo "--------------- Running the code ---------------"
echo -n "This run started on: "
date

mpirun vasp_std > log 2> err

echo -n "This run completed on: "
date
"""
    return script

‎abipy/ml/extxyz_generator.py

Lines changed: 49 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
#except ImportError as exc:
1111
# raise ImportError("ase not installed. Try `pip install ase`.") from exc
1212
from pathlib import Path
13-
#from inspect import isclass
1413
#from multiprocessing import Pool
1514
#from typing import Type, Any, Optional, Union
1615
#from enum import IntEnum
@@ -21,14 +20,12 @@
2120
#from monty.functools import lazy_property
2221
#from monty.json import MontyEncoder
2322
#from monty.collections import AttrDict
24-
#from pymatgen.io.ase import AseAtomsAdaptor
2523
#from ase import units
2624
#from ase.atoms import Atoms
2725
#from ase.io.trajectory import write_traj, Trajectory
2826
from ase.io import read
2927
#from ase.calculators.calculator import Calculator
3028
#from ase.io.vasp import write_vasp_xdatcar, write_vasp
31-
#from ase.neb import NEB
3229
#from ase.stress import voigt_6_to_full_3x3_strain
3330
#from ase.calculators.calculator import PropertyNotImplementedError
3431
from ase.calculators.singlepoint import SinglePointCalculator
@@ -38,6 +35,7 @@
3835
from abipy.electrons.gsr import GsrFile
3936
#from abipy.tools.iotools import workdir_with_prefix, PythonScript, yaml_safe_load_path
4037
from abipy.tools.typing import PathLike
38+
import abipy.flowtk.qutils as qu
4139
#from abipy.tools.serialization import HasPickleIO
4240
#from abipy.tools.context_managers import Timer
4341
#from abipy.tools.parallel import get_max_nprocs, pool_nprocs_pmode
@@ -90,6 +88,7 @@ def __init__(self, filepaths: list[PathLike]):
9088
for ext in self.SUPPORTED_EXTS:
9189
if all(f.endswith(ext) for f in self.filepaths):
9290
self.ext = ext
91+
break
9392
else:
9493
raise ValueError(f"Cannot detect extension from filepaths, should be in: {self.SUPPORTED_EXTS}")
9594

@@ -146,6 +145,38 @@ def yield_atoms(self):
146145
yield atoms
147146

148147

148+
def check_vasp_success(vasprun, outcar, verbose: int = 0) -> bool:
    """
    Check if a VASP calculation completed successfully.

    Args:
        vasprun: Vasprun-like object exposing a ``converged`` attribute
            (e.g. pymatgen.io.vasp.outputs.Vasprun).
        outcar: Outcar-like object exposing a ``run_stats`` dict
            (e.g. pymatgen.io.vasp.outputs.Outcar).
        verbose: Print diagnostic messages if > 0.

    Returns:
        bool: True if the calculation converged and reported an elapsed time,
        False otherwise.
    """
    # FIX: the original `def my_print(*args, **kwargs)` was missing the colon (SyntaxError).
    def my_print(*args, **kwargs):
        if verbose: print(*args, **kwargs)

    try:
        if not vasprun.converged:
            my_print("Calculation did not converge.")
            return False

        if outcar.run_stats.get("Elapsed time (sec)"):
            my_print("Calculation completed in {} seconds.".format(outcar.run_stats["Elapsed time (sec)"]))
        else:
            my_print("Elapsed time not found in OUTCAR.")
            return False

        my_print("Calculation completed successfully.")
        return True

    except Exception as e:
        # Best-effort check: any attribute/parsing error is reported as failure
        # instead of propagating, since callers only want a yes/no answer.
        my_print(f"Error checking calculation status: {e}")
        return False
178+
179+
149180

150181
class SinglePointRunner:
151182
"""
@@ -154,13 +185,23 @@ class SinglePointRunner:
154185
runner.collect_xyz("foo.xyz")
155186
"""
156187

157-
def __init__(self, traj_path: PathLike, topdir: PathLike, traj_range: range,
             abinitio_code: str, slurm_template: PathLike, verbose=0, **kwargs):
    """
    Args:
        traj_path: Path to the trajectory file read with ase.io.read.
        topdir: Top-level directory in which the calculations are performed.
        traj_range: range of trajectory indices to be computed.
        abinitio_code: Name of the ab-initio code e.g. "vasp".
        slurm_template: Path to the Slurm submission script template.
            If the file does not exist, a default template is written to this
            location and a RuntimeError is raised so that the user can
            customize it before rerunning.
        verbose: Verbosity level.
        kwargs: Extra kwargs used when generating the ab-initio input files.
    """
    self.traj_path = traj_path
    self.topdir = Path(str(topdir)).absolute()
    # FIX: original called isinstance(traj_range) without the class argument,
    # and the error message used `type{traj_range}` instead of type(traj_range).
    if not isinstance(traj_range, range):
        raise TypeError(f"Got {type(traj_range)} instead of range")
    self.traj_range = traj_range
    self.abinitio_code = abinitio_code
    # FIX: original used `ps.path.exists` (typo for os.path) and tested
    # self.slurm_template before that attribute was assigned.
    if not Path(slurm_template).exists():
        with open(slurm_template, "wt") as fh:
            fh.write(qu.get_slurm_template())
        # FIX: original raised RuntimeError("") with an empty message.
        raise RuntimeError(f"Cannot find {slurm_template=}. "
                           "A default template has been generated. Please customize it and rerun.")
    with open(slurm_template, "rt") as fh:
        self.slurm_template = fh.read()
    self.verbose = int(verbose)
    self.kwargs = kwargs
165206

166207
def __str__(self) -> str:
@@ -176,19 +217,18 @@ def to_string(self, verbose=0) -> str:
176217
def sbatch(self):
177218
"""
178219
"""
179-
from abipy.flowtk.qutils import slurm_sbatch
180-
181220
if not self.topdir.exists(): self.topdir.mkdir()
182221

183222
for index in self.traj_range:
184223
workdir = self.topdir / f"SINGLEPOINT_{index}"
185224
if workdir.exists():
225+
print("{workdir=} already exists. Ignoring it")
186226
continue
187227

188-
workdir.mkdir()
189228
atoms = read(self.traj_path, index=index)
190229
structure = Structure.as_structure(atoms)
191230
script_filepath = workdir / "run.sh"
231+
workdir.mkdir()
192232

193233
if self.abinitio_code == "vasp":
194234
# Generate VASP input files using the Materials Project settings for a single-point calculation
@@ -202,7 +242,7 @@ def sbatch(self):
202242
with open(script_filepath, "wt") as fh:
203243
fh.write(self.slurm_template)
204244

205-
slurm_sbatch(script_filepath)
245+
qu.slurm_sbatch(script_filepath)
206246

207247
def write_xyz(self, xyz_filepath: PathLike, dryrun=False) -> None:
208248
"""

0 commit comments

Comments
 (0)
Please sign in to comment.