|
# Public API of this module: the model class, its weights enum, and the builder.
__all__ = ["MobileNetV2", "MobileNet_V2_Weights", "mobilenet_v2"]
14 | 14 |
|
15 | 15 |
|
# Metadata shared by every MobileNetV2 checkpoint; per-weight entries
# (recipe URL, accuracy figures) are merged in on top of this base.
_COMMON_META = dict(
    task="image_classification",
    architecture="MobileNetV2",
    publication_year=2018,
    num_params=3504872,
    size=(224, 224),
    min_size=(1, 1),
    categories=_IMAGENET_CATEGORIES,
    interpolation=InterpolationMode.BILINEAR,
)
| 27 | + |
class MobileNet_V2_Weights(WeightsEnum):
    """Pretrained ImageNet-1K checkpoints for MobileNetV2."""

    # Original checkpoint from the reference classification recipe.
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/mobilenet_v2-b0353104.pth",
        transforms=partial(ImageNetEval, crop_size=224),
        meta=dict(
            _COMMON_META,
            **{
                "recipe": "https://github.com/pytorch/vision/tree/main/references/classification#mobilenetv2",
                "acc@1": 71.878,
                "acc@5": 90.286,
            },
        ),
    )
    # Retrained checkpoint with the tuned recipe; slightly higher accuracy.
    IMAGENET1K_V2 = Weights(
        url="https://download.pytorch.org/models/mobilenet_v2-7ebf99e0.pth",
        transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
        meta=dict(
            _COMMON_META,
            **{
                "recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-reg-tuning",
                "acc@1": 72.154,
                "acc@5": 90.822,
            },
        ),
    )
    # The best available checkpoint is served by default.
    DEFAULT = IMAGENET1K_V2
35 | 50 |
|
36 | 51 |
|
37 | 52 | @handle_legacy_interface(weights=("pretrained", MobileNet_V2_Weights.IMAGENET1K_V1)) |
|
0 commit comments