Skip to content

Commit

Permalink
args: add entire training parameters (#829)
Browse files Browse the repository at this point in the history
Resolves #770.
  • Loading branch information
njzjz authored Jul 30, 2022
1 parent f3e2b31 commit 5a57de2
Showing 1 changed file with 22 additions and 0 deletions.
22 changes: 22 additions & 0 deletions dpgen/generator/arginfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,26 @@ def data_args() -> List[Argument]:


def training_args() -> List[Argument]:
"""Traning arguments.
Returns
-------
list[dargs.Argument]
List of training arguments.
"""
doc_numb_models = 'Number of models to be trained in 00.train. 4 is recommended.'
doc_training_iter0_model_path = 'The model used to init the first iter training. Number of elements should be equal to numb_models.'
doc_training_init_model = 'Iteration > 0, the model parameters will be initialized from the model trained at the previous iteration. Iteration == 0, the model parameters will be initialized from training_iter0_model_path.'
doc_default_training_param = 'Training parameters for deepmd-kit in 00.train. You can find instructions from here: (https://github.com/deepmodeling/deepmd-kit).'
doc_dp_compress = 'Use dp compress to compress the model.'
doc_training_reuse_iter = "The minimal index of iteration that continues training models from old models of last iteration."
doc_reusing = " This option is only adopted when continuing training models from old models. This option will override default parameters."
doc_training_reuse_old_ratio = "The probability proportion of old data during training." + doc_reusing
doc_training_reuse_numb_steps = "Number of training batch." + doc_reusing
doc_training_reuse_start_lr = "The learning rate the start of the training." + doc_reusing
doc_training_reuse_start_pref_e = "The prefactor of energy loss at the start of the training." + doc_reusing
doc_training_reuse_start_pref_f = "The prefactor of force loss at the start of the training." + doc_reusing
doc_model_devi_activation_func = "The activation function in the model. The shape of list should be (N_models, 2), where 2 represents the embedding and fitting network. This option will override default parameters."

return [
Argument("numb_models", int, optional=False, doc=doc_numb_models),
Expand All @@ -75,6 +90,13 @@ def training_args() -> List[Argument]:
doc=doc_default_training_param),
Argument("dp_compress", bool, optional=True,
default=False, doc=doc_dp_compress),
Argument("training_reuse_iter", [None, int], optional=True, doc=doc_training_reuse_iter),
Argument("training_reuse_old_ratio", [None, float], optional=True, doc=doc_training_reuse_old_ratio),
Argument("training_reuse_numb_steps", [None, int], alias=["training_reuse_stop_batch"], optional=True, default=400000, doc=doc_training_reuse_numb_steps),
Argument("training_reuse_start_lr", [None, float], optional=True, default=1e-4, doc=doc_training_reuse_start_lr),
Argument("training_reuse_start_pref_e", [None, float, int], optional=True, default=0.1, doc=doc_training_reuse_start_pref_e),
Argument("training_reuse_start_pref_f", [None, float, int], optional=True, default=100, doc=doc_training_reuse_start_pref_f),
Argument("model_devi_activation_func", [None, list], optional=True, doc=doc_model_devi_activation_func),
]


Expand Down

0 comments on commit 5a57de2

Please sign in to comment.