diff --git a/ravenframework/Optimizers/BayesianOptimizer.py b/ravenframework/Optimizers/BayesianOptimizer.py index a00830bcec..78b15721e2 100644 --- a/ravenframework/Optimizers/BayesianOptimizer.py +++ b/ravenframework/Optimizers/BayesianOptimizer.py @@ -150,7 +150,6 @@ def __init__(self): self._paramSelectionOptions = {'ftol':1e-10, 'maxiter':200, 'disp':False} # Optimizer options for hyperparameter selection self._externalParamOptimizer = 'fmin_l_bfgs_b' # Optimizer for external hyperparameter selection self._resetModel = False # Reset regression model if True - def handleInput(self, paramInput): """ diff --git a/ravenframework/Optimizers/GradientDescent.py b/ravenframework/Optimizers/GradientDescent.py index 8910073f43..4a67aa42ae 100644 --- a/ravenframework/Optimizers/GradientDescent.py +++ b/ravenframework/Optimizers/GradientDescent.py @@ -202,7 +202,7 @@ def __init__(self): # history trackers, by traj, are deques (-1 is most recent) self._gradHistory = {} # gradients self._stepHistory = {} # {'magnitude': size, 'versor': direction, 'info': dict} for step - self._acceptHistory = {} # acceptability self._stepRecommendations = {} # by traj, if a 'cut' or 'grow' is recommended else None self._acceptRerun = {} # by traj, if True then override accept for point rerun self._convergenceCriteria = defaultdict(mathUtils.giveZero) # names and values for convergence checks @@ -212,7 +212,6 @@ def __init__(self): self._followerProximity = 1e-2 # distance at which annihilation can start occurring, in ?normalized? space self._trajectoryFollowers = defaultdict(list) # map of trajectories to the trajectories following them self._functionalConstraintExplorationLimit = 500 # number of input-space explorations allowable for functional constraints - # __private # additional methods # register adaptive sample identification criteria