From cbe4fd42b2212ce0cd3a2bbc6df4ce767538f495 Mon Sep 17 00:00:00 2001 From: LynnL4 Date: Thu, 7 Nov 2024 06:50:22 +0000 Subject: [PATCH] ci: auto generated docs --- README.md | 18 +- README_zh_CN.md | 18 +- detection/electricity_meter/swift_yolo.json | 4 +- detection/water_meter/swift_yolo.json | 4 +- ...igital_Meter_Electricity_Swift-YOLO_192.md | 8 +- docs/en/Digital_Meter_Water_Swift-YOLO_192.md | 8 +- ...igital_Meter_Electricity_Swift-YOLO_192.md | 8 +- .../Digital_Meter_Water_Swift-YOLO_192.md | 8 +- models.json | 1098 ++++++++--------- 9 files changed, 587 insertions(+), 587 deletions(-) diff --git a/README.md b/README.md index da375a5..9b0402a 100644 --- a/README.md +++ b/README.md @@ -20,28 +20,28 @@ Currently, SSCMA Model Zoo provides pre-trained models for the following applica | Model | Colab | |:------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [person_Detection_Swift-YOLO_192](docs/en/person_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/person_Detection_Swift-YOLO_192.ipynb) | | [person_Detection_Swift-YOLO_Nano_192](docs/en/person_Detection_Swift-YOLO_Nano_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/person_Detection_Swift-YOLO_Nano_192.ipynb) | -| [Digital_Meter_Electricity_Swift-YOLO_192](docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Digital_Meter_Electricity_Swift-YOLO_192.ipynb) | -| [Strawberry_Detection_Swift-YOLO_192](docs/en/Strawberry_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Strawberry_Detection_Swift-YOLO_192.ipynb) | -| [Apple_Detection_Swift-YOLO_192](docs/en/Apple_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Apple_Detection_Swift-YOLO_192.ipynb) | +| [person_Detection_Swift-YOLO_192](docs/en/person_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/person_Detection_Swift-YOLO_192.ipynb) | | [Gesture_Detection_Swift-YOLO_192](docs/en/Gesture_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Gesture_Detection_Swift-YOLO_192.ipynb) | -| [Pet_Detection_Swift-YOLO_192](docs/en/Pet_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Pet_Detection_Swift-YOLO_192.ipynb) | -| 
[Gender_Detection_Swift-YOLO_192](docs/en/Gender_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Gender_Detection_Swift-YOLO_192.ipynb) | -| [Digital_Meter_Water_Swift-YOLO_192](docs/en/Digital_Meter_Water_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Digital_Meter_Water_Swift-YOLO_192.ipynb) | | [Face_Detection_Swift-YOLO_96](docs/en/Face_Detection_Swift-YOLO_96.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Face_Detection_Swift-YOLO_96.ipynb) | | [COCO_Detection_Swift-YOLO_320](docs/en/COCO_Detection_Swift-YOLO_320.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/COCO_Detection_Swift-YOLO_320.ipynb) | +| [Gender_Detection_Swift-YOLO_192](docs/en/Gender_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Gender_Detection_Swift-YOLO_192.ipynb) | +| [Apple_Detection_Swift-YOLO_192](docs/en/Apple_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Apple_Detection_Swift-YOLO_192.ipynb) | +| [Strawberry_Detection_Swift-YOLO_192](docs/en/Strawberry_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Strawberry_Detection_Swift-YOLO_192.ipynb) | +| [Pet_Detection_Swift-YOLO_192](docs/en/Pet_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Pet_Detection_Swift-YOLO_192.ipynb) | +| [Digital_Meter_Water_Swift-YOLO_192](docs/en/Digital_Meter_Water_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Digital_Meter_Water_Swift-YOLO_192.ipynb) | +| [Digital_Meter_Electricity_Swift-YOLO_192](docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Digital_Meter_Electricity_Swift-YOLO_192.ipynb) | ### Image Classification | Model | Colab | |:--------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [Person_Classification_MobileNetV2_0.35_Rep_96](docs/en/Person_Classification_MobileNetV2_0.35_Rep_96.md) | [![Open In 
Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Person_Classification_MobileNetV2_0.35_Rep_96.ipynb) | | [Person_Classification_MobileNetV2_0.35_Rep_32](docs/en/Person_Classification_MobileNetV2_0.35_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Person_Classification_MobileNetV2_0.35_Rep_32.ipynb) | +| [Person_Classification_MobileNetV2_0.35_Rep_96](docs/en/Person_Classification_MobileNetV2_0.35_Rep_96.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Person_Classification_MobileNetV2_0.35_Rep_96.ipynb) | | [Person_Classification_MobileNetV2_0.35_Rep_64](docs/en/Person_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Person_Classification_MobileNetV2_0.35_Rep_64.ipynb) | -| [Gender_Classification_MobileNetV2_0.35_Rep_64](docs/en/Gender_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Gender_Classification_MobileNetV2_0.35_Rep_64.ipynb) | | [CIFAR-10_Classification_MobileNetV2_0.35_Rep_32](docs/en/CIFAR-10_Classification_MobileNetV2_0.35_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/CIFAR-10_Classification_MobileNetV2_0.35_Rep_32.ipynb) | | [MNIST_Classification_MobileNetV2_0.5_Rep_32](docs/en/MNIST_Classification_MobileNetV2_0.5_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/MNIST_Classification_MobileNetV2_0.5_Rep_32.ipynb) | +| [Gender_Classification_MobileNetV2_0.35_Rep_64](docs/en/Gender_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/en/Gender_Classification_MobileNetV2_0.35_Rep_64.ipynb) | diff --git a/README_zh_CN.md b/README_zh_CN.md index c57901e..364a287 100644 --- a/README_zh_CN.md +++ b/README_zh_CN.md @@ -20,28 +20,28 @@ SSCMA Model Zoo 专注于提供在 SSCMA 优化的神经网络上训练得到的 | 模型 | Colab | |:---------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [person_Detection_Swift-YOLO_192](docs/zh_CN/person_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/person_Detection_Swift-YOLO_192.ipynb) | | [person_Detection_Swift-YOLO_Nano_192](docs/zh_CN/person_Detection_Swift-YOLO_Nano_192.md) | [![Open In 
Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/person_Detection_Swift-YOLO_Nano_192.ipynb) | -| [Digital_Meter_Electricity_Swift-YOLO_192](docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.ipynb) | -| [Strawberry_Detection_Swift-YOLO_192](docs/zh_CN/Strawberry_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Strawberry_Detection_Swift-YOLO_192.ipynb) | -| [Apple_Detection_Swift-YOLO_192](docs/zh_CN/Apple_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Apple_Detection_Swift-YOLO_192.ipynb) | +| [person_Detection_Swift-YOLO_192](docs/zh_CN/person_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/person_Detection_Swift-YOLO_192.ipynb) | | [Gesture_Detection_Swift-YOLO_192](docs/zh_CN/Gesture_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Gesture_Detection_Swift-YOLO_192.ipynb) | -| [Pet_Detection_Swift-YOLO_192](docs/zh_CN/Pet_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Pet_Detection_Swift-YOLO_192.ipynb) | -| [Gender_Detection_Swift-YOLO_192](docs/zh_CN/Gender_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Gender_Detection_Swift-YOLO_192.ipynb) | -| [Digital_Meter_Water_Swift-YOLO_192](docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Digital_Meter_Water_Swift-YOLO_192.ipynb) | | [Face_Detection_Swift-YOLO_96](docs/zh_CN/Face_Detection_Swift-YOLO_96.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Face_Detection_Swift-YOLO_96.ipynb) | | [COCO_Detection_Swift-YOLO_320](docs/zh_CN/COCO_Detection_Swift-YOLO_320.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/COCO_Detection_Swift-YOLO_320.ipynb) | +| [Gender_Detection_Swift-YOLO_192](docs/zh_CN/Gender_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Gender_Detection_Swift-YOLO_192.ipynb) | +| 
[Apple_Detection_Swift-YOLO_192](docs/zh_CN/Apple_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Apple_Detection_Swift-YOLO_192.ipynb) | +| [Strawberry_Detection_Swift-YOLO_192](docs/zh_CN/Strawberry_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Strawberry_Detection_Swift-YOLO_192.ipynb) | +| [Pet_Detection_Swift-YOLO_192](docs/zh_CN/Pet_Detection_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Pet_Detection_Swift-YOLO_192.ipynb) | +| [Digital_Meter_Water_Swift-YOLO_192](docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Digital_Meter_Water_Swift-YOLO_192.ipynb) | +| [Digital_Meter_Electricity_Swift-YOLO_192](docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.ipynb) | ### Image Classification | 模型 | Colab | |:-----------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [Person_Classification_MobileNetV2_0.35_Rep_96](docs/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_96.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_96.ipynb) | | [Person_Classification_MobileNetV2_0.35_Rep_32](docs/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_32.ipynb) | +| [Person_Classification_MobileNetV2_0.35_Rep_96](docs/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_96.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_96.ipynb) | | [Person_Classification_MobileNetV2_0.35_Rep_64](docs/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Person_Classification_MobileNetV2_0.35_Rep_64.ipynb) | -| [Gender_Classification_MobileNetV2_0.35_Rep_64](docs/zh_CN/Gender_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In 
Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Gender_Classification_MobileNetV2_0.35_Rep_64.ipynb) | | [CIFAR-10_Classification_MobileNetV2_0.35_Rep_32](docs/zh_CN/CIFAR-10_Classification_MobileNetV2_0.35_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/CIFAR-10_Classification_MobileNetV2_0.35_Rep_32.ipynb) | | [MNIST_Classification_MobileNetV2_0.5_Rep_32](docs/zh_CN/MNIST_Classification_MobileNetV2_0.5_Rep_32.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/MNIST_Classification_MobileNetV2_0.5_Rep_32.ipynb) | +| [Gender_Classification_MobileNetV2_0.35_Rep_64](docs/zh_CN/Gender_Classification_MobileNetV2_0.35_Rep_64.md) | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/seeed-studio/sscma-model-zoo/blob/main/notebooks/zh_CN/Gender_Classification_MobileNetV2_0.35_Rep_64.ipynb) | diff --git a/detection/electricity_meter/swift_yolo.json b/detection/electricity_meter/swift_yolo.json index eb0549f..95390cb 100644 --- a/detection/electricity_meter/swift_yolo.json +++ b/detection/electricity_meter/swift_yolo.json @@ -1,5 +1,5 @@ { - "uuid": "91907a020c8b6a16d9b9337e617ce7dc", + "uuid": "01e826988bf6c99a730c1b1c255b9a0b", "name": "Digital Meter Electricity", "version": "1.0.0", "category": "Object Detection", @@ -125,4 +125,4 @@ "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." } -} +} \ No newline at end of file diff --git a/detection/water_meter/swift_yolo.json b/detection/water_meter/swift_yolo.json index febcc31..f31f7c6 100644 --- a/detection/water_meter/swift_yolo.json +++ b/detection/water_meter/swift_yolo.json @@ -1,5 +1,5 @@ { - "uuid": "fbbc0648e605ce6397e4c847842751ee", + "uuid": "4b7cb67e3c45f10978bf5e3686d639be", "name": "Digital Meter Water", "version": "1.0.0", "category": "Object Detection", @@ -126,4 +126,4 @@ "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." } -} +} \ No newline at end of file diff --git a/docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md b/docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md index 5582edf..6011da1 100644 --- a/docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md +++ b/docs/en/Digital_Meter_Electricity_Swift-YOLO_192.md @@ -18,10 +18,10 @@ The model is a Swift-YOLO model trained on the Digital Meter Electricity dataset ### Network -| | Type | Batch | Shape | Remark | -|:-------|:-------|:-------:|:--------------|:-----------------------------------------------------------------------------------------------------------------| -| Input | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | -| Output | bbox | 1 | [2268, 5] | The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]] | +| | Type | Batch | Shape | Remark | +|:-------|:-------|:-------:|:--------------|:-------------------------------------------------------------------------------------------------------------------| +| Input | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. 
| +| Output | bbox | 1 | [2268, 15] | The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]] | ### Benchmark | Backend | Precision | mAP(%) | MACs(MB) | Params(M) | Peek RAM(MB) | Inference(ms) | Download | Author | diff --git a/docs/en/Digital_Meter_Water_Swift-YOLO_192.md b/docs/en/Digital_Meter_Water_Swift-YOLO_192.md index f652048..a821e12 100644 --- a/docs/en/Digital_Meter_Water_Swift-YOLO_192.md +++ b/docs/en/Digital_Meter_Water_Swift-YOLO_192.md @@ -18,10 +18,10 @@ The model is a Swift-YOLO model trained on the Digital Meter Water dataset, whic ### Network -| | Type | Batch | Shape | Remark | -|:-------|:-------|:-------:|:--------------|:-----------------------------------------------------------------------------------------------------------------| -| Input | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | -| Output | bbox | 1 | [2268, 5] | The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]] | +| | Type | Batch | Shape | Remark | +|:-------|:-------|:-------:|:--------------|:-------------------------------------------------------------------------------------------------------------------| +| Input | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | +| Output | bbox | 1 | [2268, 15] | The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]] | ### Benchmark | Backend | Precision | mAP(%) | MACs(MB) | Params(M) | Peek RAM(MB) | Inference(ms) | Download | Author | diff --git a/docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md b/docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md index 654f782..c26a47d 100644 --- a/docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md +++ b/docs/zh_CN/Digital_Meter_Electricity_Swift-YOLO_192.md @@ -18,10 +18,10 @@ The model is a Swift-YOLO model trained on the Digital Meter Electricity dataset ### 网络架构 -| | 类型 | 批次 | 形状 | 备注 | -|:---|:------|:----:|:--------------|:-----------------------------------------------------------------------------------------------------------------| -| 输入 | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | -| 输出 | bbox | 1 | [2268, 5] | The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]] | +| | 类型 | 批次 | 形状 | 备注 | +|:---|:------|:----:|:--------------|:-------------------------------------------------------------------------------------------------------------------| +| 输入 | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. 
| +| 输出 | bbox | 1 | [2268, 15] | The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]] | ### 基准测试 | 框架 | 精度 | mAP(%) | MACs(MB) | Params(M) | Peek RAM(MB) | Inference(ms) | 下载 | 作者 | diff --git a/docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md b/docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md index a257924..19c754a 100644 --- a/docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md +++ b/docs/zh_CN/Digital_Meter_Water_Swift-YOLO_192.md @@ -18,10 +18,10 @@ The model is a Swift-YOLO model trained on the Digital Meter Water dataset, whic ### 网络架构 -| | 类型 | 批次 | 形状 | 备注 | -|:---|:------|:----:|:--------------|:-----------------------------------------------------------------------------------------------------------------| -| 输入 | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | -| 输出 | bbox | 1 | [2268, 5] | The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]] | +| | 类型 | 批次 | 形状 | 备注 | +|:---|:------|:----:|:--------------|:-------------------------------------------------------------------------------------------------------------------| +| 输入 | image | 1 | [192, 192, 3] | The input image should be resized to 192x192 pixels. | +| 输出 | bbox | 1 | [2268, 15] | The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]] | ### 基准测试 | 框架 | 精度 | mAP(%) | MACs(MB) | Params(M) | Peek RAM(MB) | Inference(ms) | 下载 | 作者 | diff --git a/models.json b/models.json index 137263b..a2347f7 100644 --- a/models.json +++ b/models.json @@ -1,11 +1,11 @@ { - "version": "a57c0213bb4cfb7225dd4c1d3f48cd53167bfeb3", + "version": "0fd83b445576edeb98a788473525bcf37aaf6721", "models": [ { "name": "person Detection", "version": "1.0.0", "category": "Object Detection", - "algorithm": "Swift-YOLO", + "algorithm": "Swift-YOLO Nano", "description": "The model is a Swift-YOLO model trained on the person detection dataset.", "dataset": { "name": "Person", @@ -33,7 +33,7 @@ } }, "config": { - "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" + "url": "configs/swift_yolo/swift_yolo_nano_1xb16_300e_coco.py" }, "guidelines": "", "license": "MIT", @@ -46,34 +46,34 @@ "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 95.3, - "Flops(M)": 90.564, - "Params(M)": 0.699 + "mAP(%)": 95.5, + "Flops(M)": 190.164, + "Params(M)": 1.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192.pth", "author": "Seeed Studio", - "checksum": "md5:947d7945fa252b9910b415768ed6f7b4" + "checksum": "md5:038039dfac923a3841d3bb290e249bd2" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 91.7, - "Params(M)": 0.699 + "mAP(%)": 92.7, + "Params(M)": 1.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_float32.onnx", "author": "Seeed Studio", - "checksum": "md5:f759a4da265358b2baa097509ac1d89f" + "checksum": "md5:3872088fccc618bb47a34031a5df4ab0" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 91.7 + "mAP(%)": 92.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_float32.tflite", + "url": 
"https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:8102564fee876677b14336779dfe18db" + "checksum": "md5:6c68054a3c55fd1e3137bf95d7e7bd43" }, { "backend": "TFLite", @@ -82,26 +82,42 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 91.6, + "mAP(%)": 92.6, "Inference(ms)": { - "xiao_esp32s3": 608.0 + "xiao_esp32s3": 2608.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_int8.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:7c8836e80ae2810e9a3980770dd4f2c7" + "checksum": "md5:bee22bbad91c87a6787ee606d4f799a2" + }, + { + "backend": "TFLite(vela)", + "precision": "INT8", + "device": [ + "we2" + ], + "metrics": { + "mAP(%)": 92.6, + "Inference(ms)": { + "we2": 76.0 + } + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_int8_vela.tflite", + "author": "Seeed Studio", + "checksum": "md5:f2b99229ba108c82de9379c4b6ad6354" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." }, - "uuid": "d518f043500b8e27cf294904e43b6538" + "uuid": "ea57d226365069fb01e7c1d02081f3a5" }, { "name": "person Detection", "version": "1.0.0", "category": "Object Detection", - "algorithm": "Swift-YOLO Nano", + "algorithm": "Swift-YOLO", "description": "The model is a Swift-YOLO model trained on the person detection dataset.", "dataset": { "name": "Person", @@ -129,7 +145,7 @@ } }, "config": { - "url": "configs/swift_yolo/swift_yolo_nano_1xb16_300e_coco.py" + "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" }, "guidelines": "", "license": "MIT", @@ -142,34 +158,34 @@ "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 95.5, - "Flops(M)": 190.164, - "Params(M)": 1.699 + "mAP(%)": 95.3, + "Flops(M)": 90.564, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection.pth", "author": "Seeed Studio", - "checksum": "md5:038039dfac923a3841d3bb290e249bd2" + "checksum": "md5:947d7945fa252b9910b415768ed6f7b4" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 92.7, - "Params(M)": 1.699 + "mAP(%)": 91.7, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_float32.onnx", "author": "Seeed Studio", - "checksum": "md5:3872088fccc618bb47a34031a5df4ab0" + "checksum": "md5:f759a4da265358b2baa097509ac1d89f" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 92.7 + "mAP(%)": 91.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_float32.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:6c68054a3c55fd1e3137bf95d7e7bd43" + "checksum": "md5:8102564fee876677b14336779dfe18db" }, { "backend": "TFLite", @@ -178,48 +194,31 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 92.6, - "Inference(ms)": { - "xiao_esp32s3": 2608.0 - } - }, - "url": 
"https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_int8.tflite", - "author": "Seeed Studio", - "checksum": "md5:bee22bbad91c87a6787ee606d4f799a2" - }, - { - "backend": "TFLite(vela)", - "precision": "INT8", - "device": [ - "we2" - ], - "metrics": { - "mAP(%)": 92.6, + "mAP(%)": 91.6, "Inference(ms)": { - "we2": 76.0 + "xiao_esp32s3": 608.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/swift_yolo_nano_person_192_int8_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/person/person_detection_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:f2b99229ba108c82de9379c4b6ad6354" + "checksum": "md5:7c8836e80ae2810e9a3980770dd4f2c7" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." }, - "uuid": "ea57d226365069fb01e7c1d02081f3a5" + "uuid": "d518f043500b8e27cf294904e43b6538" }, { - "uuid": "91907a020c8b6a16d9b9337e617ce7dc", - "name": "Digital Meter Electricity", + "name": "Gesture Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the Digital Meter Electricity dataset, which can detect the 7-segment digital meter.", + "description": "The model is a Swift-YOLO model trained on the gesture detection dataset.", "dataset": { - "name": "Digital Meter Electricity", - "url": "https://universe.roboflow.com/seeed-studio-dbk14/digital-meter-electricity", - "download": "https://universe.roboflow.com/ds/hK8PvFlIZ5?key=LxpaoUhp5i" + "name": "Gesture", + "url": "https://universe.roboflow.com/rsp/paper-aaj0p/dataset/33", + "download": "https://universe.roboflow.com/ds/xaMM3ZTeWy?key=5bznPZyI0t" }, "network": { "batch": 1, @@ -236,9 +235,9 @@ "type": "bbox", "shape": [ 2268, - 5 + 8 ], - "remark": "The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x8 tensor, where 2268 is the number of candidate boxes and 8 is [x, y, w, h, score, [class]]" } }, "config": { @@ -246,55 +245,45 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detect_meter.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_gesture.png", "classes": [ - "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9" + "paper", + "rock", + "scissors" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 99.2, - "MACs(MB)": 90.56, - "Params(M)": 0.67 + "mAP(%)": 90.6, + "Flops(M)": 90.8, + "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_sha1_b26cffe14038a7155315c40b49f851679a547dec.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_adda465db843aae8384c90c82e223c2cd931cad2.pth", "author": "Seeed Studio", - "checksum": "md5:dc4f0332c6a339f2eaa8b3d4baad5041" + "checksum": "md5:58779e02275790ce44ac1cfcbd95d5f9" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 98.8, - "Params(M)": 0.67, - "Peek RAM(MB)": 1.2 + "mAP(%)": 91.9, + "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_float32_sha1_e46a4c7183d073a5807e327d6b6d788853f2acf7.tflite", + "url": 
"https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_6f0e8c8ad5a6eb5c9afb5f18f43063dcc065c4b8.onnx", "author": "Seeed Studio", - "checksum": "md5:b764db53c927d4e0f0f04c14845f47d7" + "checksum": "md5:bfed043a7cbed06bc84d1ccba277fb03" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 98.8, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 1.2 + "mAP(%)": 91.9 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_54f794c25b545a1d33502e3f93a620c4cecfb1f9.tflite", "author": "Seeed Studio", - "checksum": "md5:05bc8bc7fdacef7e2d2f2694ca838864" + "checksum": "md5:106c8a6e88cdaa0ddfd68d3a61cc29fe" }, { "backend": "TFLite", @@ -303,16 +292,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 93.1, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 0.35, + "mAP(%)": 93.0, "Inference(ms)": { - "xiao_esp32s3": 691.0 + "xiao_esp32s3": 642.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_8d25b2b0be2a0ea38d3fe0aca5ce3891f7aa67c5.tflite", "author": "Seeed Studio", - "checksum": "md5:05bc8bc7fdacef7e2d2f2694ca838864" + "checksum": "md5:e972b9cbea2677c89cadd6669a2650f4" }, { "backend": "TFLite(vela)", @@ -321,51 +308,50 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 93.1, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 0.35, + "mAP(%)": 93.0, "Inference(ms)": { - "grove_vision_ai_we2": 50 + "grove_vision_ai_we2": 47 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_8d25b2b0be2a0ea38d3fe0aca5ce3891f7aa67c5_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:f3dd771780632a1434b43053973a2ffb" + "checksum": "md5:91331a9db811ed5cfb5cdba2e419e507" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." - } + }, + "uuid": "60eb2fe141549daef6989609601e9b28" }, { - "name": "Strawberry Detection", + "name": "Face Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the strawberry detection dataset.", + "description": "The model is a Swift-YOLO model trained on the face detection dataset.", "dataset": { - "name": "Strawberry", - "url": "https://universe.roboflow.com/bbb-ynve2/caomei-i40aq/dataset/8", - "download": "https://universe.roboflow.com/ds/hm4qHytEO4?key=TeF7sjHrH5" + "name": "face detection", + "url": "https://universe.roboflow.com/detection-02p2y/face-b3jhr/dataset/2", + "download": "https://universe.roboflow.com/ds/tCPeEouXqt?key=uohxULat1Q" }, "network": { "batch": 1, "input": { "type": "image", "shape": [ - 192, - 192, + 96, + 96, 3 ], - "remark": "The input image should be resized to 192x192 pixels." + "remark": "The input image should be resized to 96x96 pixels." 
}, "output": { "type": "bbox", "shape": [ - 2268, + 567, 6 ], - "remark": "The output is a 2268x6 tensor, where 2268 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" + "remark": "The output is a 567x6 tensor, where 567 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" } }, "config": { @@ -373,43 +359,43 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_strawberry.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_face.png", "classes": [ - "strawberry" + "face" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 94.7, - "Flops(M)": 90.564, + "mAP(%)": 98.7, + "Flops(M)": 22.641, "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_sha1_fe1d7dec30d62e583a7ccf717fd6585c792570bf.pth", "author": "Seeed Studio", - "checksum": "md5:a5d2603e002a05936fd0364a732c34a4" + "checksum": "md5:f56eaee97a7e4b12a69d28bb4044072d" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 92.6, + "mAP(%)": 97.9, "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_float32_sha1_441e4868e17a9bac5740280b3db791a6d75ac8a7.onnx", "author": "Seeed Studio", - "checksum": "md5:45f0480ad8b0a092d1fc5361ac83e257" + "checksum": "md5:5507b43f7a947a69a329d1842d8a483d" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 92.6 + "mAP(%)": 97.9 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_float32.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_float32_sha1_7c75dc6e777e3d3098d7f0bdb3e5c529c4d2865a.tflite", "author": "Seeed Studio", - "checksum": "md5:b679f194c23ea358b7c042099f18e3e6" + "checksum": "md5:a330f4da70f258ab8a7d55bdbbf112a0" }, { "backend": "TFLite", @@ -418,14 +404,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 91.8, + "mAP(%)": 97.9, "Inference(ms)": { - "xiao_esp32s3": 616.0 + "xiao_esp32s3": 180.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_int8.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_int8_sha1_2287b951101007d4cd1d09c3da68e53e6f23a071.tflite", "author": "Seeed Studio", - "checksum": "md5:6f8529c3d8a73be60fd7ad6a690210c4" + "checksum": "md5:18c320dbf7c6ecc3c4ad216da2070d26" }, { "backend": "TFLite(vela)", @@ -434,94 +420,96 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 91.8, + "mAP(%)": 97.9, "Inference(ms)": { - "grove_vision_ai_we2": 45 + "grove_vision_ai_we2": 33 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_int8_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_int8_sha1_2287b951101007d4cd1d09c3da68e53e6f23a071_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:bd436b117ebf531b22b0157f5c1d3162" + "checksum": "md5:377aee70190387cc4cf2435f13aab3af" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU 
Threshold: 0.55, mAP Eval IoU: 0.50." }, - "uuid": "248553e02d3be1fdc1b4908e374dc35f" + "uuid": "89b6ea7192f70451db72ad5121857f53" }, { - "name": "Apple Detection", + "uuid": "d07b6be0796b2684c987e5222831459c", + "name": "COCO Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the apple detection dataset.", + "description": "The model is a Swift-YOLO model trained on the COCO2017 dataset.", "dataset": { - "name": "Apple", - "url": "https://universe.roboflow.com/bbb-ynve2/fruits-3gejo/dataset/4", - "download": "https://universe.roboflow.com/ds/8cLQfLLQwy?key=sKHHXh8uoU" - }, + "name": "COCO2017", + "url": "https://public.roboflow.com/object-detection/microsoft-coco-subset", + "download": "https://public.roboflow.com/ds/saZ2K9LYJf?key=FKkHrP6bjs" + }, "network": { "batch": 1, "input": { "type": "image", "shape": [ - 192, - 192, + 320, + 320, 3 ], - "remark": "The input image should be resized to 192x192 pixels." + "remark": "The input image should be resized to 320x320 pixels." }, "output": { "type": "bbox", "shape": [ - 2268, - 6 + 6300, + 85 ], - "remark": "The output is a 2268x6 tensor, where 2268 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" + "remark": "The output is a 6300x85 tensor, where 6300 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]]" } }, "config": { - "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" + "url": "configs/yolov5/swift_yolo_shuff_1xb16_300e_coco.py" }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_apple.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_coco.png", "classes": [ - "apple" + "person" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 91.5, - "Flops(M)": 90.564, - "Params(M)": 0.699 + "mAP(%)": 25.1, + "Flops(M)": 194, + "Params(M)": 0.63 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_a5927bd6a6c6569d27edb98da946a8e75a8d816f.pth", "author": "Seeed Studio", - "checksum": "md5:6b4c1beb9a8372ccb51dd285437e25bc" + "checksum": "md5:79a08ceed5d9ff8e033c8a6ffd5c6093" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 89.5, - "Params(M)": 0.699 + "mAP(%)": 25.1, + "Params(M)": 0.63 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_20bc2c8517a8e42699bf46f1409f7541e52345ac.onnx", "author": "Seeed Studio", - "checksum": "md5:b1ce1586a52e0390dd8111fc80d026df" + "checksum": "md5:2ca5762b62cc8ff143d44c3233a3b2c5" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 89.5 + "mAP(%)": 25.1, + "Peek RAM(MB)": 1.2 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_float32.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_5dfa1a16d27ef347c0173c5297395963760fcc57.tflite", "author": "Seeed Studio", - "checksum": "md5:872caf833788b1b7e5798e346da52446" + "checksum": "md5:42c97149bcf0a241951a0756f1cc2a31" }, { "backend": "TFLite", @@ -530,14 +518,15 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 89.3, + "mAP(%)": 25.1, + "Peek 
RAM(MB)": 0.35, "Inference(ms)": { - "xiao_esp32s3": 688.0 + "xiao_esp32s3": 200.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_int8.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_int8_sha1_3b0a6d7fd95e9dd21902beae6fa2d1cd0807bd7b.tflite", "author": "Seeed Studio", - "checksum": "md5:d6204144c69170dc43fd3dc01c05aa39" + "checksum": "md5:e2f7586cd6389f3e54acb01912681dd6" }, { "backend": "TFLite(vela)", @@ -546,31 +535,31 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 89.3, + "mAP(%)": 25.1, + "Peek RAM(MB)": 0.35, "Inference(ms)": { - "grove_vision_ai_we2": 45 + "grove_vision_ai_we2": 20.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_int8_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_int8_sha1_3b0a6d7fd95e9dd21902beae6fa2d1cd0807bd7b_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:bd1b4b6e2446b6dadf3a6a2c6066686c" + "checksum": "md5:053f314c9f8c31147aab8c96412c0773" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." - }, - "uuid": "39c1b25338e5b01ec1784aa11e3828a0" + } }, { - "name": "Gesture Detection", + "name": "Gender Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the gesture detection dataset.", + "description": "The model is a Swift-YOLO model trained on the gender detection dataset.", "dataset": { - "name": "Gesture", - "url": "https://universe.roboflow.com/rsp/paper-aaj0p/dataset/33", - "download": "https://universe.roboflow.com/ds/xaMM3ZTeWy?key=5bznPZyI0t" + "name": "Gender", + "url": "https://universe.roboflow.com/aaa-61999/gender-ymxim/dataset/6", + "download": "https://universe.roboflow.com/ds/E7H3j002kN?key=XCi9zboD6w" }, "network": { "batch": 1, @@ -587,9 +576,9 @@ "type": "bbox", "shape": [ 2268, - 8 + 7 ], - "remark": "The output is a 2268x8 tensor, where 2268 is the number of candidate boxes and 8 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x7 tensor, where 2268 is the number of candidate boxes and 7 is [x, y, w, h, score, [class]]" } }, "config": { @@ -597,45 +586,44 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_gesture.png", + "image": "https://files.seeedstudio.com/sscma/static/gender_cls.png", "classes": [ - "paper", - "rock", - "scissors" + "male", + "female" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 90.6, - "Flops(M)": 90.8, + "mAP(%)": 98.2, + "Flops(M)": 90.685, "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_adda465db843aae8384c90c82e223c2cd931cad2.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection.pth", "author": "Seeed Studio", - "checksum": "md5:58779e02275790ce44ac1cfcbd95d5f9" + "checksum": "md5:7cf06ac986af08038b22bf1ff4e80b38" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 91.9, + "mAP(%)": 95.6, "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_6f0e8c8ad5a6eb5c9afb5f18f43063dcc065c4b8.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_float32.onnx", 
"author": "Seeed Studio", - "checksum": "md5:bfed043a7cbed06bc84d1ccba277fb03" + "checksum": "md5:6feba1a4f1dae6e537ee528450fdab86" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 91.9 + "mAP(%)": 95.6 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_54f794c25b545a1d33502e3f93a620c4cecfb1f9.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:106c8a6e88cdaa0ddfd68d3a61cc29fe" + "checksum": "md5:eef4d08c06f0f26194d1df3a19ce4743" }, { "backend": "TFLite", @@ -644,14 +632,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 93.0, + "mAP(%)": 95.5, "Inference(ms)": { - "xiao_esp32s3": 642.0 + "xiao_esp32s3": 631.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_8d25b2b0be2a0ea38d3fe0aca5ce3891f7aa67c5.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:e972b9cbea2677c89cadd6669a2650f4" + "checksum": "md5:d4e554fa19f4fb84dd27dc056767207c" }, { "backend": "TFLite(vela)", @@ -660,31 +648,31 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 93.0, + "mAP(%)": 95.5, "Inference(ms)": { - "grove_vision_ai_we2": 47 + "grove_vision_ai_we2": 45 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gesture/swift_yolo_1xb16_300e_coco_sha1_8d25b2b0be2a0ea38d3fe0aca5ce3891f7aa67c5_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_int8_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:91331a9db811ed5cfb5cdba2e419e507" + "checksum": "md5:22da293b391c08f90cbd7ed8b545ddce" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." 
}, - "uuid": "60eb2fe141549daef6989609601e9b28" + "uuid": "3cfb3c55609550994c76f1490f7b00c3" }, { - "name": "Pet Detection", + "name": "Apple Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the animal detection dataset for cat and dog.", + "description": "The model is a Swift-YOLO model trained on the apple detection dataset.", "dataset": { - "name": "Animal", - "url": "https://universe.roboflow.com/animal-cegrr/animal-ph37i/dataset/11", - "download": "https://universe.roboflow.com/ds/5RkM7lqc1k?key=xPrRirL4BH" + "name": "Apple", + "url": "https://universe.roboflow.com/bbb-ynve2/fruits-3gejo/dataset/4", + "download": "https://universe.roboflow.com/ds/8cLQfLLQwy?key=sKHHXh8uoU" }, "network": { "batch": 1, @@ -701,9 +689,9 @@ "type": "bbox", "shape": [ 2268, - 7 + 6 ], - "remark": "The output is a 2268x7 tensor, where 2268 is the number of candidate boxes and 7 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x6 tensor, where 2268 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" } }, "config": { @@ -711,44 +699,43 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_animal.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_apple.png", "classes": [ - "cat", - "dog" + "apple" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 93.3, - "Flops(M)": 90.685, - "Params(M)": 0.7 + "mAP(%)": 91.5, + "Flops(M)": 90.564, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection.pth", "author": "Seeed Studio", - "checksum": "md5:6e808cc7f5da04844a29bb78a63d4c15" + "checksum": "md5:6b4c1beb9a8372ccb51dd285437e25bc" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 88.3, - "Params(M)": 0.7 + "mAP(%)": 89.5, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_float32.onnx", "author": "Seeed Studio", - "checksum": "md5:975fbe6e1b6613b3f21a89db3e1dafea" + "checksum": "md5:b1ce1586a52e0390dd8111fc80d026df" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 88.3 + "mAP(%)": 89.5 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_float32.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:7185076304fc99d36172a84d11287ff6" + "checksum": "md5:872caf833788b1b7e5798e346da52446" }, { "backend": "TFLite", @@ -757,14 +744,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 87.1, + "mAP(%)": 89.3, "Inference(ms)": { - "xiao_esp32s3": 706.0 + "xiao_esp32s3": 688.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_int8.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:433f5ac6d86026904b2b8db58262f4e9" + "checksum": "md5:d6204144c69170dc43fd3dc01c05aa39" }, { "backend": "TFLite(vela)", @@ -773,31 +760,31 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 87.1, + "mAP(%)": 89.3, "Inference(ms)": { "grove_vision_ai_we2": 45 
} }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_int8_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/fruit/apple_detection_int8_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:a5e3e6b2e8d2151c04ad8909cc8626fe" + "checksum": "md5:bd1b4b6e2446b6dadf3a6a2c6066686c" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." }, - "uuid": "2752623ddc29b22a939cda6283b1d3dc" + "uuid": "39c1b25338e5b01ec1784aa11e3828a0" }, { - "name": "Gender Detection", + "name": "Strawberry Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the gender detection dataset.", + "description": "The model is a Swift-YOLO model trained on the strawberry detection dataset.", "dataset": { - "name": "Gender", - "url": "https://universe.roboflow.com/aaa-61999/gender-ymxim/dataset/6", - "download": "https://universe.roboflow.com/ds/E7H3j002kN?key=XCi9zboD6w" + "name": "Strawberry", + "url": "https://universe.roboflow.com/bbb-ynve2/caomei-i40aq/dataset/8", + "download": "https://universe.roboflow.com/ds/hm4qHytEO4?key=TeF7sjHrH5" }, "network": { "batch": 1, @@ -814,9 +801,9 @@ "type": "bbox", "shape": [ 2268, - 7 + 6 ], - "remark": "The output is a 2268x7 tensor, where 2268 is the number of candidate boxes and 7 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x6 tensor, where 2268 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" } }, "config": { @@ -824,44 +811,43 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/gender_cls.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_strawberry.png", "classes": [ - "male", - "female" + "strawberry" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 98.2, - "Flops(M)": 90.685, - "Params(M)": 0.7 + "mAP(%)": 94.7, + "Flops(M)": 90.564, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection.pth", "author": "Seeed Studio", - "checksum": "md5:7cf06ac986af08038b22bf1ff4e80b38" + "checksum": "md5:a5d2603e002a05936fd0364a732c34a4" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 95.6, - "Params(M)": 0.7 + "mAP(%)": 92.6, + "Params(M)": 0.699 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_float32.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_float32.onnx", "author": "Seeed Studio", - "checksum": "md5:6feba1a4f1dae6e537ee528450fdab86" + "checksum": "md5:45f0480ad8b0a092d1fc5361ac83e257" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 95.6 + "mAP(%)": 92.6 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_float32.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:eef4d08c06f0f26194d1df3a19ce4743" + "checksum": "md5:b679f194c23ea358b7c042099f18e3e6" }, { "backend": "TFLite", @@ -870,14 +856,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 95.5, + "mAP(%)": 91.8, "Inference(ms)": { - "xiao_esp32s3": 631.0 + "xiao_esp32s3": 616.0 } 
}, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_int8.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:d4e554fa19f4fb84dd27dc056767207c" + "checksum": "md5:6f8529c3d8a73be60fd7ad6a690210c4" }, { "backend": "TFLite(vela)", @@ -886,32 +872,31 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 95.5, + "mAP(%)": 91.8, "Inference(ms)": { "grove_vision_ai_we2": 45 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/gender/gender_detection_int8_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/strawberry/strawberry_detection_int8_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:22da293b391c08f90cbd7ed8b545ddce" + "checksum": "md5:bd436b117ebf531b22b0157f5c1d3162" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." }, - "uuid": "3cfb3c55609550994c76f1490f7b00c3" + "uuid": "248553e02d3be1fdc1b4908e374dc35f" }, { - "uuid": "fbbc0648e605ce6397e4c847842751ee", - "name": "Digital Meter Water", + "name": "Pet Detection", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the Digital Meter Water dataset, which can detect the water meter number.", + "description": "The model is a Swift-YOLO model trained on the animal detection dataset for cat and dog.", "dataset": { - "name": "Digital Meter Electricity", - "url": "https://universe.roboflow.com/seeed-studio-dbk14/digital-meter-water", - "download": "https://universe.roboflow.com/ds/GiPpzvOtZJ?key=xsAuy7kg92" + "name": "Animal", + "url": "https://universe.roboflow.com/animal-cegrr/animal-ph37i/dataset/11", + "download": "https://universe.roboflow.com/ds/5RkM7lqc1k?key=xPrRirL4BH" }, "network": { "batch": 1, @@ -928,9 +913,9 @@ "type": "bbox", "shape": [ 2268, - 5 + 7 ], - "remark": "The output is a 2268x5 tensor, where 2268 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x7 tensor, where 2268 is the number of candidate boxes and 7 is [x, y, w, h, score, [class]]" } }, "config": { @@ -938,171 +923,44 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detect_meter.png", + "image": "https://files.seeedstudio.com/sscma/static/detection_animal.png", "classes": [ - "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "-" + "cat", + "dog" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 95.3, - "MACs(MB)": 91.8, - "Params(M)": 0.67 + "mAP(%)": 93.3, + "Flops(M)": 90.685, + "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_sha1_e10d262518622edc50e0820b213581fc8d628e2b.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection.pth", "author": "Seeed Studio", - "checksum": "md5:5b782dd05f3a7af22cd7b3508c543925" + "checksum": "md5:6e808cc7f5da04844a29bb78a63d4c15" }, { "backend": "ONNX", "precision": "FLOAT32", - "metrics": { - "mAP(%)": 91.8, - "Params(M)": 0.67, - "Peek RAM(MB)": 1.2 - }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_sha1_e4139097229c74d6d627a769e788374f7bd23e48.onnx", - "author": "Seeed Studio", - "checksum": 
"md5:4314cc6cdc385b664f7c17dbf8b42b1b" - }, - { - "backend": "TFLite", - "precision": "FLOAT32", - "metrics": { - "mAP(%)": 91.8, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 1.2 - }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_float32_sha1_d523dd19922ff4a3a53a0795222121317d01354d.tflite", - "author": "Seeed Studio", - "checksum": "md5:4042bcce210ed9d06d15dc9a9678ca40" - }, - { - "backend": "TFLite", - "precision": "INT8", - "device": [ - "xiao_esp32s3" - ], "metrics": { "mAP(%)": 88.3, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 0.35, - "Inference(ms)": { - "xiao_esp32s3": 691.0 - } - }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_7975ab6a7d1daa26f61a2d364f82594834587bfe.tflite", - "author": "Seeed Studio", - "checksum": "md5:0ce672bc4bb710d7b5fde39529d5668c" - }, - { - "backend": "TFLite(vela)", - "precision": "INT8", - "device": [ - "grove_vision_ai_we2" - ], - "metrics": { - "mAP(%)": 88.3, - "MACs(MB)": 89.0, - "Peek RAM(MB)": 0.35, - "Inference(ms)": { - "grove_vision_ai_we2": 49 - } - }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_7975ab6a7d1daa26f61a2d364f82594834587bfe_vela.tflite", - "author": "Seeed Studio", - "checksum": "md5:b0b1870a62dd7b432d4658eb791022a7" - } - ], - "benchmark_note": { - "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." - } - }, - { - "name": "Face Detection", - "version": "1.0.0", - "category": "Object Detection", - "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the face detection dataset.", - "dataset": { - "name": "face detection", - "url": "https://universe.roboflow.com/detection-02p2y/face-b3jhr/dataset/2", - "download": "https://universe.roboflow.com/ds/tCPeEouXqt?key=uohxULat1Q" - }, - "network": { - "batch": 1, - "input": { - "type": "image", - "shape": [ - 96, - 96, - 3 - ], - "remark": "The input image should be resized to 96x96 pixels." 
- }, - "output": { - "type": "bbox", - "shape": [ - 567, - 6 - ], - "remark": "The output is a 567x6 tensor, where 567 is the number of candidate boxes and 6 is [x, y, w, h, score, [class]]" - } - }, - "config": { - "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" - }, - "guidelines": "", - "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_face.png", - "classes": [ - "face" - ], - "benchmark": [ - { - "backend": "PyTorch", - "precision": "FLOAT32", - "metrics": { - "mAP(%)": 98.7, - "Flops(M)": 22.641, - "Params(M)": 0.699 - }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_sha1_fe1d7dec30d62e583a7ccf717fd6585c792570bf.pth", - "author": "Seeed Studio", - "checksum": "md5:f56eaee97a7e4b12a69d28bb4044072d" - }, - { - "backend": "ONNX", - "precision": "FLOAT32", - "metrics": { - "mAP(%)": 97.9, - "Params(M)": 0.699 + "Params(M)": 0.7 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_float32_sha1_441e4868e17a9bac5740280b3db791a6d75ac8a7.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_float32.onnx", "author": "Seeed Studio", - "checksum": "md5:5507b43f7a947a69a329d1842d8a483d" + "checksum": "md5:975fbe6e1b6613b3f21a89db3e1dafea" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 97.9 + "mAP(%)": 88.3 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_float32_sha1_7c75dc6e777e3d3098d7f0bdb3e5c529c4d2865a.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_float32.tflite", "author": "Seeed Studio", - "checksum": "md5:a330f4da70f258ab8a7d55bdbbf112a0" + "checksum": "md5:7185076304fc99d36172a84d11287ff6" }, { "backend": "TFLite", @@ -1111,14 +969,14 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 97.9, + "mAP(%)": 87.1, "Inference(ms)": { - "xiao_esp32s3": 180.0 + "xiao_esp32s3": 706.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_int8_sha1_2287b951101007d4cd1d09c3da68e53e6f23a071.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_int8.tflite", "author": "Seeed Studio", - "checksum": "md5:18c320dbf7c6ecc3c4ad216da2070d26" + "checksum": "md5:433f5ac6d86026904b2b8db58262f4e9" }, { "backend": "TFLite(vela)", @@ -1127,96 +985,108 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 97.9, + "mAP(%)": 87.1, "Inference(ms)": { - "grove_vision_ai_we2": 33 + "grove_vision_ai_we2": 45 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/face_detection/swift_yolo_1xb16_300e_coco_300_int8_sha1_2287b951101007d4cd1d09c3da68e53e6f23a071_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/animal/animal_detection_int8_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:377aee70190387cc4cf2435f13aab3af" + "checksum": "md5:a5e3e6b2e8d2151c04ad8909cc8626fe" } ], "benchmark_note": { "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." 
}, - "uuid": "89b6ea7192f70451db72ad5121857f53" + "uuid": "2752623ddc29b22a939cda6283b1d3dc" }, { - "uuid": "d07b6be0796b2684c987e5222831459c", - "name": "COCO Detection", + "uuid": "4b7cb67e3c45f10978bf5e3686d639be", + "name": "Digital Meter Water", "version": "1.0.0", "category": "Object Detection", "algorithm": "Swift-YOLO", - "description": "The model is a Swift-YOLO model trained on the COCO2017 dataset.", + "description": "The model is a Swift-YOLO model trained on the Digital Meter Water dataset, which can detect the water meter number.", "dataset": { - "name": "COCO2017", - "url": "https://public.roboflow.com/object-detection/microsoft-coco-subset", - "download": "https://public.roboflow.com/ds/saZ2K9LYJf?key=FKkHrP6bjs" + "name": "Digital Meter Electricity", + "url": "https://universe.roboflow.com/seeed-studio-dbk14/digital-meter-water", + "download": "https://universe.roboflow.com/ds/GiPpzvOtZJ?key=xsAuy7kg92" }, "network": { "batch": 1, "input": { "type": "image", "shape": [ - 320, - 320, + 192, + 192, 3 ], - "remark": "The input image should be resized to 320x320 pixels." + "remark": "The input image should be resized to 192x192 pixels." }, "output": { "type": "bbox", "shape": [ - 6300, - 85 + 2268, + 15 ], - "remark": "The output is a 6300x85 tensor, where 6300 is the number of candidate boxes and 5 is [x, y, w, h, score, [class]]" + "remark": "The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]]" } }, "config": { - "url": "configs/yolov5/swift_yolo_shuff_1xb16_300e_coco.py" + "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/detection_coco.png", + "image": "https://files.seeedstudio.com/sscma/static/detect_meter.png", "classes": [ - "person" + "0", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "-" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "mAP(%)": 25.1, - "Flops(M)": 194, - "Params(M)": 0.63 + "mAP(%)": 95.3, + "MACs(MB)": 91.8, + "Params(M)": 0.67 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_a5927bd6a6c6569d27edb98da946a8e75a8d816f.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_sha1_e10d262518622edc50e0820b213581fc8d628e2b.pth", "author": "Seeed Studio", - "checksum": "md5:79a08ceed5d9ff8e033c8a6ffd5c6093" + "checksum": "md5:5b782dd05f3a7af22cd7b3508c543925" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "mAP(%)": 25.1, - "Params(M)": 0.63 + "mAP(%)": 91.8, + "Params(M)": 0.67, + "Peek RAM(MB)": 1.2 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_20bc2c8517a8e42699bf46f1409f7541e52345ac.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_sha1_e4139097229c74d6d627a769e788374f7bd23e48.onnx", "author": "Seeed Studio", - "checksum": "md5:2ca5762b62cc8ff143d44c3233a3b2c5" + "checksum": "md5:4314cc6cdc385b664f7c17dbf8b42b1b" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "mAP(%)": 25.1, + "mAP(%)": 91.8, + "MACs(MB)": 89.0, "Peek RAM(MB)": 1.2 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_float32_sha1_5dfa1a16d27ef347c0173c5297395963760fcc57.tflite", + "url": 
"https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_float32_sha1_d523dd19922ff4a3a53a0795222121317d01354d.tflite", "author": "Seeed Studio", - "checksum": "md5:42c97149bcf0a241951a0756f1cc2a31" + "checksum": "md5:4042bcce210ed9d06d15dc9a9678ca40" }, { "backend": "TFLite", @@ -1225,15 +1095,16 @@ "xiao_esp32s3" ], "metrics": { - "mAP(%)": 25.1, + "mAP(%)": 88.3, + "MACs(MB)": 89.0, "Peek RAM(MB)": 0.35, "Inference(ms)": { - "xiao_esp32s3": 200.0 + "xiao_esp32s3": 691.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_int8_sha1_3b0a6d7fd95e9dd21902beae6fa2d1cd0807bd7b.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_7975ab6a7d1daa26f61a2d364f82594834587bfe.tflite", "author": "Seeed Studio", - "checksum": "md5:e2f7586cd6389f3e54acb01912681dd6" + "checksum": "md5:0ce672bc4bb710d7b5fde39529d5668c" }, { "backend": "TFLite(vela)", @@ -1242,15 +1113,16 @@ "grove_vision_ai_we2" ], "metrics": { - "mAP(%)": 25.1, + "mAP(%)": 88.3, + "MACs(MB)": 89.0, "Peek RAM(MB)": 0.35, "Inference(ms)": { - "grove_vision_ai_we2": 20.0 + "grove_vision_ai_we2": 49 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/coco/swift_yolo_shuffle_coco_320_int8_sha1_3b0a6d7fd95e9dd21902beae6fa2d1cd0807bd7b_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/water_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_7975ab6a7d1daa26f61a2d364f82594834587bfe_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:053f314c9f8c31147aab8c96412c0773" + "checksum": "md5:b0b1870a62dd7b432d4658eb791022a7" } ], "benchmark_note": { @@ -1258,80 +1130,91 @@ } }, { - "uuid": "85b9a21113a3708d5a3744a80b26068a", - "name": "Person Classification", + "uuid": "01e826988bf6c99a730c1b1c255b9a0b", + "name": "Digital Meter Electricity", "version": "1.0.0", - "category": "Image Classification", - "algorithm": "MobileNetV2 0.35 Rep", - "description": "The model is a vision model designed for person classification. It utilizes the [SSCMA](https://github.com/Seeed-Studio/ModelAssistant) training and employs the MobileNetV2 (0.35) Rep algorithm.", + "category": "Object Detection", + "algorithm": "Swift-YOLO", + "description": "The model is a Swift-YOLO model trained on the Digital Meter Electricity dataset, which can detect the 7-segment digital meter.", "dataset": { - "name": "VWW", - "url": "https://github.com/Mxbonn/visualwakewords", - "download": "https://universe.roboflow.com/ds/rvZt8qZfBp?key=WDJI0KBhlY" + "name": "Digital Meter Electricity", + "url": "https://universe.roboflow.com/seeed-studio-dbk14/digital-meter-electricity", + "download": "https://universe.roboflow.com/ds/hK8PvFlIZ5?key=LxpaoUhp5i" }, "network": { "batch": 1, "input": { "type": "image", "shape": [ - 96, - 96, + 192, + 192, 3 ], - "remark": "The input image should be resized to 96x96 pixels" + "remark": "The input image should be resized to 192x192 pixels." 
}, "output": { - "type": "classification", + "type": "bbox", "shape": [ - 2 + 2268, + 15 ], - "remark": "The output is a 2-element vector, which represents the probability of the input image belonging to each class" + "remark": "The output is a 2268x15 tensor, where 2268 is the number of candidate boxes and 15 is [x, y, w, h, score, [class]]" } }, "config": { - "url": "configs/classification/mobnetv2_0.35_rep_1bx16_300e_custom.py" + "url": "configs/swift_yolo/swift_yolo_tiny_1xb16_300e_coco.py" }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/person_cls.png", + "image": "https://files.seeedstudio.com/sscma/static/detect_meter.png", "classes": [ - "Not a person", - "Person" + "0", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 88.37, - "Flops(MB)": 76.5, - "Params(M)": 2.71 + "mAP(%)": 99.2, + "MACs(MB)": 90.56, + "Params(M)": 0.67 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_0b47deccb4ffab4d8f970ea6379b838163e5bd8f.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_sha1_b26cffe14038a7155315c40b49f851679a547dec.pth", "author": "Seeed Studio", - "checksum": "md5:d29fae6afd2e9069d8df46df61e6b6fa" + "checksum": "md5:dc4f0332c6a339f2eaa8b3d4baad5041" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 88.36, - "Params(M)": 2.71 + "mAP(%)": 98.8, + "Params(M)": 0.67, + "Peek RAM(MB)": 1.2 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_689cbad95dc725880861e72b5b9f7878f04ce17f.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_float32_sha1_e46a4c7183d073a5807e327d6b6d788853f2acf7.tflite", "author": "Seeed Studio", - "checksum": "md5:11cab18ffe33314ac59e7c3dc0ba9280" + "checksum": "md5:b764db53c927d4e0f0f04c14845f47d7" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 88.36, - "Params(M)": 2.71 + "mAP(%)": 98.8, + "MACs(MB)": 89.0, + "Peek RAM(MB)": 1.2 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_a92eb1b9420f2947bfb65153e1def12097fdb977.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73.tflite", "author": "Seeed Studio", - "checksum": "md5:7ff86dbf4e69ecd035344da4abb1988d" + "checksum": "md5:05bc8bc7fdacef7e2d2f2694ca838864" }, { "backend": "TFLite", @@ -1340,15 +1223,16 @@ "xiao_esp32s3" ], "metrics": { - "Top-1(%)": 88.27, - "Params(M)": 2.71, + "mAP(%)": 93.1, + "MACs(MB)": 89.0, + "Peek RAM(MB)": 0.35, "Inference(ms)": { - "xiao_esp32s3": 582 + "xiao_esp32s3": 691.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_int8_sha1_f1a66ce5a3f05bc1293920e5a95f547e27df6550.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73.tflite", "author": "Seeed Studio", - "checksum": "md5:beb714685e497664b90c98a5cfa66aba" + "checksum": "md5:05bc8bc7fdacef7e2d2f2694ca838864" }, { "backend": "TFLite(vela)", @@ -1357,17 +1241,21 @@ "grove_vision_ai_we2" ], "metrics": { - 
"Top-1(%)": 88.27, - "Params(M)": 2.71, + "mAP(%)": 93.1, + "MACs(MB)": 89.0, + "Peek RAM(MB)": 0.35, "Inference(ms)": { - "grove_vision_ai_we2": 15.0 + "grove_vision_ai_we2": 50 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_int8_sha1_f1a66ce5a3f05bc1293920e5a95f547e27df6550_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/detection/electricity_meter/yolov5_tiny_1xb16_300e_coco_int8_sha1_d670a8f8ceb3691beaa89da352c678634a29df73_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:eef4655855f6300aa3cec2d357af0573" + "checksum": "md5:f3dd771780632a1434b43053973a2ffb" } - ] + ], + "benchmark_note": { + "Evaluation Parameters": " Confidence Threshold: 0.001, IoU Threshold: 0.55, mAP Eval IoU: 0.50." + } }, { "uuid": "7ed115afbe3a915729f49f72de87aca4", @@ -1482,7 +1370,7 @@ ] }, { - "uuid": "acac6bd6e1f883a274b480c29f3fb962", + "uuid": "85b9a21113a3708d5a3744a80b26068a", "name": "Person Classification", "version": "1.0.0", "category": "Image Classification", @@ -1498,11 +1386,11 @@ "input": { "type": "image", "shape": [ - 64, - 64, + 96, + 96, 3 ], - "remark": "The input image should be resized to 64x64 pixels" + "remark": "The input image should be resized to 96x96 pixels" }, "output": { "type": "classification", @@ -1527,35 +1415,35 @@ "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 85.22, - "Flops(MB)": 34, + "Top-1(%)": 88.37, + "Flops(MB)": 76.5, "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_6dec3c029041408de043c5921621ab7abc4c4ec4.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_0b47deccb4ffab4d8f970ea6379b838163e5bd8f.pth", "author": "Seeed Studio", - "checksum": "md5:d99c07e431b3e99904750ca58b3107d0" + "checksum": "md5:d29fae6afd2e9069d8df46df61e6b6fa" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 85.23, + "Top-1(%)": 88.36, "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_aeb9c1f3bf7c19f3490daee7da1ac0d76b7e49d9.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_689cbad95dc725880861e72b5b9f7878f04ce17f.onnx", "author": "Seeed Studio", - "checksum": "md5:6b20f439c535e97fd141ecd77fe3505f" + "checksum": "md5:11cab18ffe33314ac59e7c3dc0ba9280" }, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 85.23, + "Top-1(%)": 88.36, "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_d44e8c1247dfc66e645f5d07b904e4a430149882.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_float32_sha1_a92eb1b9420f2947bfb65153e1def12097fdb977.tflite", "author": "Seeed Studio", - "checksum": "md5:23476d16c2bed169df28b18320acd863" + "checksum": "md5:7ff86dbf4e69ecd035344da4abb1988d" }, { "backend": "TFLite", @@ -1564,15 +1452,15 @@ "xiao_esp32s3" ], "metrics": { - "Top-1(%)": 85.26, + "Top-1(%)": 88.27, "Params(M)": 2.71, "Inference(ms)": { - "xiao_esp32s3": 286 + "xiao_esp32s3": 582 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_int8_sha1_a939407d507b45ceca293e74c8961d59357b37b2.tflite", + "url": 
"https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_int8_sha1_f1a66ce5a3f05bc1293920e5a95f547e27df6550.tflite", "author": "Seeed Studio", - "checksum": "md5:0a8eb383e11c42cc2b400567f262067b" + "checksum": "md5:beb714685e497664b90c98a5cfa66aba" }, { "backend": "TFLite(vela)", @@ -1581,29 +1469,29 @@ "grove_vision_ai_we2" ], "metrics": { - "Top-1(%)": 85.26, + "Top-1(%)": 88.27, "Params(M)": 2.71, "Inference(ms)": { - "grove_vision_ai_we2": 8.0 + "grove_vision_ai_we2": 15.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_int8_sha1_a939407d507b45ceca293e74c8961d59357b37b2_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww96_int8_sha1_f1a66ce5a3f05bc1293920e5a95f547e27df6550_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:5dba5f8b9081fa5caef5c0a66c7c3d0c" + "checksum": "md5:eef4655855f6300aa3cec2d357af0573" } ] }, { - "uuid": "fee63b1d204cc2ff54b5e1767c9166dc", - "name": "Gender Classification", + "uuid": "acac6bd6e1f883a274b480c29f3fb962", + "name": "Person Classification", "version": "1.0.0", "category": "Image Classification", "algorithm": "MobileNetV2 0.35 Rep", - "description": "The model is a vision model designed for Gender classification. It utilizes the [SSCMA](https://github.com/Seeed-Studio/ModelAssistant) training and employs the MobileNetV2 (0.35) Rep algorithm.", + "description": "The model is a vision model designed for person classification. It utilizes the [SSCMA](https://github.com/Seeed-Studio/ModelAssistant) training and employs the MobileNetV2 (0.35) Rep algorithm.", "dataset": { - "name": "Gender", - "url": "https://universe.roboflow.com/seeed-studio-e2fso/gender-8vbxd", - "download": "https://universe.roboflow.com/ds/CnPDloVfHN?key=BGRNmtbN5T" + "name": "VWW", + "url": "https://github.com/Mxbonn/visualwakewords", + "download": "https://universe.roboflow.com/ds/rvZt8qZfBp?key=WDJI0KBhlY" }, "network": { "batch": 1, @@ -1629,45 +1517,45 @@ }, "guidelines": "", "license": "MIT", - "image": "https://files.seeedstudio.com/sscma/static/gender_cls.png", + "image": "https://files.seeedstudio.com/sscma/static/person_cls.png", "classes": [ - "female", - "male" + "Not a person", + "Person" ], "benchmark": [ { "backend": "PyTorch", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 94.5, - "Flops(M)": 5.49, - "Params(M)": 2.16 + "Top-1(%)": 85.22, + "Flops(MB)": 34, + "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_sha1_62336a001f0cd58d2ac8ed5a6823b9ac7374f276.pth", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_6dec3c029041408de043c5921621ab7abc4c4ec4.pth", "author": "Seeed Studio", - "checksum": "md5:4d15c10ce4fb53caf159ff4fdf07d360" + "checksum": "md5:d99c07e431b3e99904750ca58b3107d0" }, { "backend": "ONNX", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 94.5, - "Params(M)": 2.16 + "Top-1(%)": 85.23, + "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_a9031151303fb4eaeae99262d26c0719a7bca7d7.onnx", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_aeb9c1f3bf7c19f3490daee7da1ac0d76b7e49d9.onnx", "author": "Seeed Studio", - "checksum": "md5:6a61966198d37cf18f436bf782d328f1" + "checksum": "md5:6b20f439c535e97fd141ecd77fe3505f" 
}, { "backend": "TFLite", "precision": "FLOAT32", "metrics": { - "Top-1(%)": 94.5, - "Params(M)": 2.16 + "Top-1(%)": 85.23, + "Params(M)": 2.71 }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_5e6dc80bd5f3ddb429326a27f767816d998c919b.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_float32_sha1_d44e8c1247dfc66e645f5d07b904e4a430149882.tflite", "author": "Seeed Studio", - "checksum": "md5:dac8f6c254ce477e053639cacdf31dbe" + "checksum": "md5:23476d16c2bed169df28b18320acd863" }, { "backend": "TFLite", @@ -1676,15 +1564,15 @@ "xiao_esp32s3" ], "metrics": { - "Top-1(%)": 94.3, - "Params(M)": 2.16, + "Top-1(%)": 85.26, + "Params(M)": 2.71, "Inference(ms)": { - "xiao_esp32s3": 40 + "xiao_esp32s3": 286 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_int8_sha1_2bc5677615f8aeb41bffe21e25de6d01f91c3a41.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_int8_sha1_a939407d507b45ceca293e74c8961d59357b37b2.tflite", "author": "Seeed Studio", - "checksum": "md5:853967d1e961a9fdef02122437efa239" + "checksum": "md5:0a8eb383e11c42cc2b400567f262067b" }, { "backend": "TFLite(vela)", @@ -1693,15 +1581,15 @@ "grove_vision_ai_we2" ], "metrics": { - "Top-1(%)": 94.3, - "Params(M)": 2.16, + "Top-1(%)": 85.26, + "Params(M)": 2.71, "Inference(ms)": { - "grove_vision_ai_we2": 4.0 + "grove_vision_ai_we2": 8.0 } }, - "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_int8_sha1_2bc5677615f8aeb41bffe21e25de6d01f91c3a41_vela.tflite", + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/person/mobilenetv2_0.35rep_vww64_int8_sha1_a939407d507b45ceca293e74c8961d59357b37b2_vela.tflite", "author": "Seeed Studio", - "checksum": "md5:ed0c4d7d93abc31463f65677567e2a18" + "checksum": "md5:5dba5f8b9081fa5caef5c0a66c7c3d0c" } ] }, @@ -1956,6 +1844,118 @@ "checksum": "md5:8233f08f98911eb637d149b4f18495dd" } ] + }, + { + "uuid": "fee63b1d204cc2ff54b5e1767c9166dc", + "name": "Gender Classification", + "version": "1.0.0", + "category": "Image Classification", + "algorithm": "MobileNetV2 0.35 Rep", + "description": "The model is a vision model designed for Gender classification. 
It utilizes the [SSCMA](https://github.com/Seeed-Studio/ModelAssistant) training and employs the MobileNetV2 (0.35) Rep algorithm.", + "dataset": { + "name": "Gender", + "url": "https://universe.roboflow.com/seeed-studio-e2fso/gender-8vbxd", + "download": "https://universe.roboflow.com/ds/CnPDloVfHN?key=BGRNmtbN5T" + }, + "network": { + "batch": 1, + "input": { + "type": "image", + "shape": [ + 64, + 64, + 3 + ], + "remark": "The input image should be resized to 64x64 pixels" + }, + "output": { + "type": "classification", + "shape": [ + 2 + ], + "remark": "The output is a 2-element vector, which represents the probability of the input image belonging to each class" + } + }, + "config": { + "url": "configs/classification/mobnetv2_0.35_rep_1bx16_300e_custom.py" + }, + "guidelines": "", + "license": "MIT", + "image": "https://files.seeedstudio.com/sscma/static/gender_cls.png", + "classes": [ + "female", + "male" + ], + "benchmark": [ + { + "backend": "PyTorch", + "precision": "FLOAT32", + "metrics": { + "Top-1(%)": 94.5, + "Flops(M)": 5.49, + "Params(M)": 2.16 + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_sha1_62336a001f0cd58d2ac8ed5a6823b9ac7374f276.pth", + "author": "Seeed Studio", + "checksum": "md5:4d15c10ce4fb53caf159ff4fdf07d360" + }, + { + "backend": "ONNX", + "precision": "FLOAT32", + "metrics": { + "Top-1(%)": 94.5, + "Params(M)": 2.16 + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_a9031151303fb4eaeae99262d26c0719a7bca7d7.onnx", + "author": "Seeed Studio", + "checksum": "md5:6a61966198d37cf18f436bf782d328f1" + }, + { + "backend": "TFLite", + "precision": "FLOAT32", + "metrics": { + "Top-1(%)": 94.5, + "Params(M)": 2.16 + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_5e6dc80bd5f3ddb429326a27f767816d998c919b.tflite", + "author": "Seeed Studio", + "checksum": "md5:dac8f6c254ce477e053639cacdf31dbe" + }, + { + "backend": "TFLite", + "precision": "INT8", + "device": [ + "xiao_esp32s3" + ], + "metrics": { + "Top-1(%)": 94.3, + "Params(M)": 2.16, + "Inference(ms)": { + "xiao_esp32s3": 40 + } + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_int8_sha1_2bc5677615f8aeb41bffe21e25de6d01f91c3a41.tflite", + "author": "Seeed Studio", + "checksum": "md5:853967d1e961a9fdef02122437efa239" + }, + { + "backend": "TFLite(vela)", + "precision": "INT8", + "device": [ + "grove_vision_ai_we2" + ], + "metrics": { + "Top-1(%)": 94.3, + "Params(M)": 2.16, + "Inference(ms)": { + "grove_vision_ai_we2": 4.0 + } + }, + "url": "https://files.seeedstudio.com/sscma/model_zoo/classification/gender/mbv2_0.35_rep_gender_int8_sha1_2bc5677615f8aeb41bffe21e25de6d01f91c3a41_vela.tflite", + "author": "Seeed Studio", + "checksum": "md5:ed0c4d7d93abc31463f65677567e2a18" + } + ] } ] } \ No newline at end of file
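
Each detection entry above describes its output as an `N x [x, y, w, h, score, [class]]` tensor (for example 2268x6 for the single-class Swift-YOLO models), and every benchmark artifact carries a `checksum` field of the form `md5:<hex>`. A minimal sketch of how a consumer of this metadata might use those two fields follows; it assumes NumPy, a hypothetical local copy of `apple_detection_int8.tflite`, and an illustrative 0.25 confidence threshold, while the tensor layout and the md5 string are taken verbatim from the entries above.

```python
import hashlib

import numpy as np


def verify_checksum(path: str, expected: str) -> bool:
    """Check a local file against a models.json checksum string such as 'md5:<hex>'."""
    algo, _, digest = expected.partition(":")
    assert algo == "md5", "checksums in this revision are md5"
    with open(path, "rb") as f:
        return hashlib.md5(f.read()).hexdigest() == digest


def decode_detections(raw: np.ndarray, conf_threshold: float = 0.25):
    """Split an N x [x, y, w, h, score, class] tensor into boxes, scores and class ids."""
    assert raw.ndim == 2 and raw.shape[1] >= 6, "expected [x, y, w, h, score, class] rows"
    keep = raw[:, 4] > conf_threshold
    return raw[keep, :4], raw[keep, 4], raw[keep, 5].astype(int)


if __name__ == "__main__":
    # Hypothetical local file; the md5 string is copied from the Apple Detection entry.
    print(verify_checksum("apple_detection_int8.tflite",
                          "md5:d6204144c69170dc43fd3dc01c05aa39"))

    # Random data standing in for a real (2268, 6) Swift-YOLO output tensor.
    boxes, scores, classes = decode_detections(np.random.rand(2268, 6).astype(np.float32))
    print(len(boxes), "boxes above the confidence threshold")
```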