diff --git a/src/neptune_fastai/impl/__init__.py b/src/neptune_fastai/impl/__init__.py
index abe784c..7815bb5 100644
--- a/src/neptune_fastai/impl/__init__.py
+++ b/src/neptune_fastai/impl/__init__.py
@@ -154,7 +154,12 @@ def _batch_size(self) -> int:
 
     @property
     def _optimizer_name(self) -> Optional[str]:
-        return self.opt_func.__name__
+        # Optimizer callables wrapped in functools.partial (or other callables
+        # without __name__) cannot be introspected; warn and fall back to "NA"
+        # only in that case, so normal runs stay silent.
+        if hasattr(self.opt_func, "__name__"):
+            return self.opt_func.__name__
+        warnings.warn("NeptuneCallback: Couldn't retrieve the optimizer name, so it will not be logged.")
+        return "NA"
 
     @property
     def _device(self) -> str:
diff --git a/tests/neptune_fastai/test_e2e.py b/tests/neptune_fastai/test_e2e.py
index 8f06735..f8baf89 100644
--- a/tests/neptune_fastai/test_e2e.py
+++ b/tests/neptune_fastai/test_e2e.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from functools import partial
 from itertools import islice
 from pathlib import Path
 
@@ -24,6 +25,7 @@
     untar_data,
 )
 from fastai.callback.all import SaveModelCallback
+from fastai.optimizer import Adam
 from fastai.tabular.all import (
     Categorify,
     FillMissing,
@@ -71,12 +73,15 @@ def test_vision_classification_with_handler(self):
             device=torch.device("cpu"),
         )
 
+        opt_func = partial(Adam, lr=3e-3, wd=0.01)
+
         learn = cnn_learner(
             dls,
             squeezenet1_0,
             metrics=error_rate,
             cbs=[NeptuneCallback(run, "experiment")],
             pretrained=False,
+            opt_func=opt_func,
         )
 
         learn.fit(1)
@@ -91,6 +96,7 @@ def test_vision_classification_with_handler(self):
 
         exp_config = run["experiment/config"].fetch()
         assert exp_config["batch_size"] == 64
         assert exp_config["criterion"] == "CrossEntropyLoss()"
+        assert exp_config["optimizer"]["name"] == "NA"
         assert exp_config["input_shape"] == {"x": "[3, 224, 224]", "y": 1}
         # and