default quantization params set in QAT from config (#1192)
shaydeci authored Jun 19, 2023
1 parent 42e3ecf commit 2ca8647
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion src/super_gradients/training/sg_trainer/sg_trainer.py
@@ -2077,7 +2077,9 @@ def quantize_from_config(cls, cfg: Union[DictConfig, dict]) -> Tuple[nn.Module,
         quantization_params = get_param(cfg, "quantization_params")

         if quantization_params is None:
-            raise logger.warning("Your recipe does not include quantization_params. Using default quantization params.")
+            logger.warning("Your recipe does not include quantization_params. Using default quantization params.")
+            quantization_params = load_recipe("quantization_params/default_quantization_params").quantization_params
+            cfg.quantization_params = quantization_params

         if get_param(cfg.checkpoint_params, "checkpoint_path") is None and get_param(cfg.checkpoint_params, "pretrained_weights") is None:
             raise ValueError("Starting checkpoint / pretrained weights are a must for QAT finetuning.")
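
A note on the deleted line, since the diff is easy to misread as a pure style change: logging.Logger.warning returns None, so the old raise logger.warning(...) would log the warning and then immediately fail with TypeError: exceptions must derive from BaseException, crashing the trainer instead of falling back to defaults. The patch drops the raise and loads the bundled default quantization recipe instead. Below is a minimal runnable sketch of the corrected flow; get_param, load_recipe, and resolve_quantization_params here are simplified stand-ins for illustration, not the actual super_gradients implementations.

    import logging

    logger = logging.getLogger(__name__)

    # Why the deleted line was a bug: Logger.warning() returns None, and in
    # Python 3 `raise None` fails with TypeError before any fallback can run.
    try:
        raise logger.warning("demo warning")  # same shape as the removed line
    except TypeError as exc:
        print(exc)  # -> exceptions must derive from BaseException

    # Hypothetical stand-ins for the super_gradients helpers used in the diff,
    # just enough to make the fallback runnable here.
    def get_param(cfg: dict, key: str, default=None):
        return cfg.get(key, default)

    class _DefaultRecipe:
        quantization_params = {"placeholder": "default QAT settings"}

    def load_recipe(name: str) -> _DefaultRecipe:
        return _DefaultRecipe()

    # The corrected control flow, mirroring the patched quantize_from_config:
    # warn, load the default recipe, and write the params back onto cfg so
    # downstream code that reads cfg["quantization_params"] keeps working.
    def resolve_quantization_params(cfg: dict):
        quantization_params = get_param(cfg, "quantization_params")
        if quantization_params is None:
            logger.warning("Your recipe does not include quantization_params. Using default quantization params.")
            quantization_params = load_recipe("quantization_params/default_quantization_params").quantization_params
            cfg["quantization_params"] = quantization_params
        return quantization_params

    print(resolve_quantization_params({}))  # falls back to the default params

Writing the resolved params back onto the config (as the real patch does with cfg.quantization_params = quantization_params) means later consumers of the config see the same defaults rather than None.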
