Commit

Merge branch 'feature/lixin/hyper-parameters_merge' into master
stevetorr authored Nov 19, 2019
2 parents e0e8632 + c755bec commit f57194e
Showing 17 changed files with 4,332 additions and 1,124 deletions.
1,041 changes: 590 additions & 451 deletions flare/gp.py

644 changes: 467 additions & 177 deletions flare/gp_algebra.py

744 changes: 744 additions & 0 deletions flare/gp_algebra_multi.py

23 changes: 17 additions & 6 deletions flare/gp_from_aimd.py
@@ -92,6 +92,8 @@ def __init__(self, frames: List[Structure],
         self.skip = skip
         assert (skip >= 1), "skip needs to be an integer >= 1"
         self.validate_ratio = validate_ratio
+        assert (validate_ratio>=0 and validate_ratio<=1), \
+            "validate_ratio needs to be [0,1]"
         self.max_trains = max_trains
         self.curr_step = 0
         self.max_atoms_from_frame = max_atoms_from_frame
@@ -212,11 +214,19 @@ def pre_run(self):
         if self.verbose >= 3 and atom_count > 0:
             print(f"Added {atom_count} atoms to pretrain")

-        if self.seed_envs or atom_count or self.seed_frames:
+        if (self.seed_envs or atom_count or self.seed_frames) and self.max_trains>0:
             if self.verbose >= 3:
                 print("Now commencing pre-run training of GP (which has "
                       "non-empty training set)")
             self.train_gp(max_iter=self.pre_train_max_iter)
+        else:
+            if self.verbose >= 3:
+                print("Now commencing pre-run set up of GP (which has "
+                      "non-empty training set)")
+            self.gp.set_L_alpha()
+
+        if self.model_write:
+            self.gp.write_model(self.model_write, self.model_format)

     def run(self):
         """
@@ -229,10 +239,7 @@ def run(self):
             print("Commencing run with pre-run...")
         self.pre_run()

-        if self.validate_ratio > 0:
-            train_frame = int(len(self.frames) * (1 - self.validate_ratio))
-        else:
-            train_frame = len(self.frames)
+        train_frame = int(len(self.frames) * (1 - self.validate_ratio))

         # Loop through trajectory
         nsample = 0
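
In the hunk above, the old validate_ratio == 0 special case collapses into a single expression, since int(len(frames) * (1 - 0)) already equals len(frames). A quick stand-in check of the split arithmetic (frames here is just a placeholder list, not flare Structure objects):

# Placeholder check of the train/validation split used above.
frames = list(range(100))  # pretend trajectory of 100 frames
for validate_ratio in (0.0, 0.1, 0.25, 1.0):
    train_frame = int(len(frames) * (1 - validate_ratio))
    print(validate_ratio, train_frame)
# 0.0 -> 100 (train on everything), 0.1 -> 90, 0.25 -> 75, 1.0 -> 0
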
@@ -292,6 +299,10 @@ def run(self):
                 else:
                     self.gp.update_L_alpha()
                 nsample = 0
+                if self.checkpoint_interval \
+                        and self.train_count % self.checkpoint_interval == 0 \
+                        and self.model_write:
+                    self.gp.write_model(self.model_write, self.model_format)
             else:
                 self.gp.update_L_alpha()

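The added block writes the model every checkpoint_interval training events, provided model_write is set. A small standalone sketch of the same modulo condition (plain locals standing in for the attributes; the output name and format below are made up):

# Stand-in for the checkpoint condition added above.
checkpoint_interval = 5         # hypothetical setting
model_write = "gp_model"        # hypothetical output name
model_format = "pickle"         # hypothetical format

for train_count in range(1, 16):
    if checkpoint_interval \
            and train_count % checkpoint_interval == 0 \
            and model_write:
        print(f"step {train_count}: would write {model_write} ({model_format})")
# triggers at steps 5, 10 and 15
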
@@ -357,4 +368,4 @@ def train_gp(self, max_iter: int = None):
         if self.checkpoint_interval \
                 and self.train_count % self.checkpoint_interval == 0 \
                 and self.model_format:
-            self.gp.write_model(self.output_name+'_model', self.model_format)
+            self.gp.write_model(self.output_name+'_model', self.model_format)
2 changes: 1 addition & 1 deletion flare/kernels.py
@@ -362,7 +362,7 @@ def three_body_jit(bond_array_1, bond_array_2,

                    fj = fj1*fj2*fj3
                    fdj = fdj1*fj2*fj3+fj1*fdj2*fj3

                    kern += triplet_kernel(ci1, ci2, cj1, cj2, ri1, ri2, ri3,
                                           rj1, rj2, rj3, fi, fj, fdi, fdj,
                                           ls1, ls2, ls3, sig2)

0 comments on commit f57194e
