From ba72a6189be07df263b23af2947fd4498443eb13 Mon Sep 17 00:00:00 2001
From: Vincent Roulet
Date: Thu, 19 Oct 2023 08:50:40 -0700
Subject: [PATCH] no abbreviations

---
 jaxopt/_src/anderson_wrapper.py        | 2 +-
 jaxopt/_src/armijo_sgd.py              | 4 ++--
 jaxopt/_src/backtracking_linesearch.py | 8 ++++----
 jaxopt/_src/bfgs.py                    | 6 +++---
 jaxopt/_src/bisection.py               | 2 +-
 jaxopt/_src/block_cd.py                | 2 +-
 jaxopt/_src/broyden.py                 | 2 +-
 jaxopt/_src/fixed_point_iteration.py   | 2 +-
 jaxopt/_src/gauss_newton.py            | 2 +-
 jaxopt/_src/hager_zhang_linesearch.py  | 4 ++--
 jaxopt/_src/lbfgs.py                   | 6 +++---
 jaxopt/_src/lbfgsb.py                  | 6 +++---
 jaxopt/_src/levenberg_marquardt.py     | 6 +++---
 jaxopt/_src/mirror_descent.py          | 2 +-
 jaxopt/_src/nonlinear_cg.py            | 6 +++---
 jaxopt/_src/optax_wrapper.py           | 4 ++--
 jaxopt/_src/polyak_sgd.py              | 4 ++--
 jaxopt/_src/proximal_gradient.py       | 2 +-
 jaxopt/_src/zoom_linesearch.py         | 6 +++---
 19 files changed, 38 insertions(+), 38 deletions(-)

diff --git a/jaxopt/_src/anderson_wrapper.py b/jaxopt/_src/anderson_wrapper.py
index 0a8a8024..dea00a0e 100644
--- a/jaxopt/_src/anderson_wrapper.py
+++ b/jaxopt/_src/anderson_wrapper.py
@@ -163,7 +163,7 @@ def use_param(t):
                             residual_gram=residual_gram)
 
     if self.verbose:
-      self.log_info(next_state, error_name="Inner Solver Err.")
+      self.log_info(next_state, error_name="Inner Solver Error")
     return base.OptStep(params=next_params, state=next_state)
 
   def optimality_fun(self, params, *args, **kwargs):
diff --git a/jaxopt/_src/armijo_sgd.py b/jaxopt/_src/armijo_sgd.py
index 9c48aae9..c9b79b93 100644
--- a/jaxopt/_src/armijo_sgd.py
+++ b/jaxopt/_src/armijo_sgd.py
@@ -319,9 +319,9 @@ def update(self, params, state, *args, **kwargs) -> base.OptStep:
     if self.verbose:
       self.log_info(
           next_state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              'Obj. Val.': next_state.value,
+              'Objective Value': next_state.value,
               'Stepsize': stepsize
           },
       )
diff --git a/jaxopt/_src/backtracking_linesearch.py b/jaxopt/_src/backtracking_linesearch.py
index 15f21399..72db4722 100644
--- a/jaxopt/_src/backtracking_linesearch.py
+++ b/jaxopt/_src/backtracking_linesearch.py
@@ -284,12 +284,12 @@ def update(
                               num_grad_eval=num_grad_eval)
 
     if self.verbose:
-      additional_info = {'Stepsize': stepsize, 'Obj. Val.': new_value}
+      additional_info = {'Stepsize': stepsize, 'Objective Value': new_value}
       if self.condition != 'armijo':
-        error_name = "Min. Decr. & Curv. Err."
-        additional_info.update({'Decr. Err.': error_cond1})
+        error_name = "Minimum Decrease & Curvature Errors"
+        additional_info.update({'Decrease Error': error_cond1})
       else:
-        error_name = "Decr. Err."
+        error_name = "Decrease Error"
       self.log_info(
           new_state,
           error_name=error_name,
diff --git a/jaxopt/_src/bfgs.py b/jaxopt/_src/bfgs.py
index 31a346af..d7b6939b 100644
--- a/jaxopt/_src/bfgs.py
+++ b/jaxopt/_src/bfgs.py
@@ -264,11 +264,11 @@ def update(self,
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              "Obj. Val.": new_value,
+              "Objective Value": new_value,
               "Stepsize": new_stepsize,
-              "Num. Linesearch Iter.":
+              "Number Linesearch Iterations":
                   new_state.num_linesearch_iter - state.num_linesearch_iter
           }
       )
diff --git a/jaxopt/_src/bisection.py b/jaxopt/_src/bisection.py
index ea99ac75..943a20d3 100644
--- a/jaxopt/_src/bisection.py
+++ b/jaxopt/_src/bisection.py
@@ -155,7 +155,7 @@ def update(self,
     if self.verbose:
       self.log_info(
           state,
-          error_name="Abs. Val. Output",
+          error_name="Absolute Value Output",
           additional_info={
               "High Point": high,
               "Low Point": low
diff --git a/jaxopt/_src/block_cd.py b/jaxopt/_src/block_cd.py
index dcf9bc2f..d77eb4d9 100644
--- a/jaxopt/_src/block_cd.py
+++ b/jaxopt/_src/block_cd.py
@@ -170,7 +170,7 @@ def body_fun(i, tup):
     if self.verbose:
       self.log_info(
           state,
-          error_name="Dist. btw Iterates"
+          error_name="Distance btw Iterates"
       )
     return base.OptStep(params=params, state=state)
 
diff --git a/jaxopt/_src/broyden.py b/jaxopt/_src/broyden.py
index a717d5b0..df968319 100644
--- a/jaxopt/_src/broyden.py
+++ b/jaxopt/_src/broyden.py
@@ -387,7 +387,7 @@ def ls_fun_with_aux(params, *args, **kwargs):
           error_name="Norm Output",
           additional_info={
               "Stepsize": new_stepsize,
-              "Num. Linesearch Iter.":
+              "Number Linesearch Iterations":
                   new_state.num_linesearch_iter - state.num_linesearch_iter
           }
       )
diff --git a/jaxopt/_src/fixed_point_iteration.py b/jaxopt/_src/fixed_point_iteration.py
index c77f28d1..24190449 100644
--- a/jaxopt/_src/fixed_point_iteration.py
+++ b/jaxopt/_src/fixed_point_iteration.py
@@ -121,7 +121,7 @@ def update(self,
     if self.verbose:
       self.log_info(
           next_state,
-          error_name="Dist. btw Iterates"
+          error_name="Distance btw Iterates"
       )
     return base.OptStep(params=next_params, state=next_state)
 
diff --git a/jaxopt/_src/gauss_newton.py b/jaxopt/_src/gauss_newton.py
index 384b50cb..ea1d89cd 100644
--- a/jaxopt/_src/gauss_newton.py
+++ b/jaxopt/_src/gauss_newton.py
@@ -131,7 +131,7 @@ def update(self,
       self.log_info(
           state,
           error_name="Norm GN Update",
-          additional_info={"Obj. Val.": value}
+          additional_info={"Objective Value": value}
       )
     return base.OptStep(params=params, state=state)
 
diff --git a/jaxopt/_src/hager_zhang_linesearch.py b/jaxopt/_src/hager_zhang_linesearch.py
index c5380c1e..6d03cd92 100644
--- a/jaxopt/_src/hager_zhang_linesearch.py
+++ b/jaxopt/_src/hager_zhang_linesearch.py
@@ -557,10 +557,10 @@ def _reupdate():
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Min. Decr. & Curv. Err.",
+          error_name="Minimum Decrease & Curvature Errors",
          additional_info={
              "Stepsize": new_stepsize,
-             "Obj. Val.": new_value
+             "Objective Value": new_value
          }
      )
 
diff --git a/jaxopt/_src/lbfgs.py b/jaxopt/_src/lbfgs.py
index 938bcbf9..1be37e1b 100644
--- a/jaxopt/_src/lbfgs.py
+++ b/jaxopt/_src/lbfgs.py
@@ -407,11 +407,11 @@ def update(self,
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              "Obj. Val.": new_value,
+              "Objective Value": new_value,
               "Stepsize": new_stepsize,
-              "Num. Linesearch Iter.":
+              "Number Linesearch Iterations":
                   new_state.num_linesearch_iter - state.num_linesearch_iter
           }
       )
diff --git a/jaxopt/_src/lbfgsb.py b/jaxopt/_src/lbfgsb.py
index 29e632c7..030e7082 100644
--- a/jaxopt/_src/lbfgsb.py
+++ b/jaxopt/_src/lbfgsb.py
@@ -559,11 +559,11 @@ def update(
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Proj. Grad. Norm",
+          error_name="Projected Gradient Norm",
           additional_info={
-              "Obj. Val.": new_value,
+              "Objective Value": new_value,
               "Stepsize": new_stepsize,
-              "Num. Linesearch Iter.":
+              "Number Linesearch Iterations":
                   new_state.num_linesearch_iter - state.num_linesearch_iter
           }
       )
diff --git a/jaxopt/_src/levenberg_marquardt.py b/jaxopt/_src/levenberg_marquardt.py
index 9e32750f..1a3646ac 100644
--- a/jaxopt/_src/levenberg_marquardt.py
+++ b/jaxopt/_src/levenberg_marquardt.py
@@ -452,10 +452,10 @@ def update(self, params, state: NamedTuple, *args, **kwargs) -> base.OptStep:
     if self.verbose:
       self.log_info(
           state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              "Obj. Val.": new_value,
-              "Damp. Factor": damping_factor
+              "Objective Value": new_value,
+              "Damping Factor": damping_factor
           }
       )
     return base.OptStep(params=params, state=state)
diff --git a/jaxopt/_src/mirror_descent.py b/jaxopt/_src/mirror_descent.py
index 4b2fcbde..1e962770 100644
--- a/jaxopt/_src/mirror_descent.py
+++ b/jaxopt/_src/mirror_descent.py
@@ -169,7 +169,7 @@ def _update(self, x, state, hyperparams_proj, args, kwargs):
     if self.verbose:
       self.log_info(
           next_state,
-          error_name="Dist. btw Iterates"
+          error_name="Distance btw Iterates"
       )
     return base.OptStep(params=next_x, state=next_state)
 
diff --git a/jaxopt/_src/nonlinear_cg.py b/jaxopt/_src/nonlinear_cg.py
index b0832fdf..398f3db2 100644
--- a/jaxopt/_src/nonlinear_cg.py
+++ b/jaxopt/_src/nonlinear_cg.py
@@ -273,11 +273,11 @@ def update(self,
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              "Obj. Val.": new_value,
+              "Objective Value": new_value,
               "Stepsize": new_stepsize,
-              "Num. Linesearch Iter.":
+              "Number Linesearch Iterations":
                   new_state.num_linesearch_iter - state.num_linesearch_iter
           }
       )
diff --git a/jaxopt/_src/optax_wrapper.py b/jaxopt/_src/optax_wrapper.py
index 99b03daa..b17f4a66 100644
--- a/jaxopt/_src/optax_wrapper.py
+++ b/jaxopt/_src/optax_wrapper.py
@@ -154,8 +154,8 @@ def update(self,
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Grad. Norm",
-          additional_info={"Obj. Value": value}
+          error_name="Gradient Norm",
+          additional_info={"Objective Value": value}
       )
     return base.OptStep(params=params, state=new_state)
 
diff --git a/jaxopt/_src/polyak_sgd.py b/jaxopt/_src/polyak_sgd.py
index c786692b..2ba74630 100644
--- a/jaxopt/_src/polyak_sgd.py
+++ b/jaxopt/_src/polyak_sgd.py
@@ -210,9 +210,9 @@ def update(self,
     if self.verbose:
       self.log_info(
           new_state,
-          error_name="Grad. Norm",
+          error_name="Gradient Norm",
           additional_info={
-              "Obj. Val.": value,
+              "Objective Value": value,
               "Stepsize": stepsize,
           }
       )
diff --git a/jaxopt/_src/proximal_gradient.py b/jaxopt/_src/proximal_gradient.py
index 54bf73e4..76dba7e9 100644
--- a/jaxopt/_src/proximal_gradient.py
+++ b/jaxopt/_src/proximal_gradient.py
@@ -277,7 +277,7 @@ def _update_accel(self, x, state, hyperparams_prox, args, kwargs):
     if self.verbose:
       self.log_info(
           next_state,
-          error_name="Dist. btw Iterates",
+          error_name="Distance btw Iterates",
           additional_info={
               "Stepsize": next_stepsize
           }
diff --git a/jaxopt/_src/zoom_linesearch.py b/jaxopt/_src/zoom_linesearch.py
index f30969a9..a1e2c2c2 100644
--- a/jaxopt/_src/zoom_linesearch.py
+++ b/jaxopt/_src/zoom_linesearch.py
@@ -813,11 +813,11 @@ def _cond_fun(self, inputs):
 
   def _log_info(self, state, stepsize):
     self.log_info(
         state,
-        error_name="Min. Decr. & Curv. Err.",
+        error_name="Minimum Decrease & Curvature Errors",
         additional_info={
             "Stepsize": stepsize,
-            "Decr. Err.": state.decrease_error,
-            "Curv. Err.": state.curvature_error
+            "Decrease Error": state.decrease_error,
+            "Curvature Error": state.curvature_error
         }
     )
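
A minimal sketch (not part of the patch) of where the renamed labels surface, assuming only the public jaxopt API (LBFGS, its verbose flag, and run); the objective fun and the iterate counts below are illustrative choices. Running any solver with verbose=True routes through the log_info calls edited above, so per-iteration output reports "Gradient Norm" and "Objective Value" rather than "Grad. Norm" and "Obj. Val.":

    import jax.numpy as jnp
    from jaxopt import LBFGS

    def fun(w):
      # Strongly convex quadratic with minimum at w = 1.
      return jnp.sum((w - 1.0) ** 2)

    # verbose=True triggers the per-iteration log_info calls renamed above.
    solver = LBFGS(fun=fun, maxiter=5, verbose=True)
    params, state = solver.run(init_params=jnp.zeros(3))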