From 059f74cf4c935e824e9177d42502372a641a7c84 Mon Sep 17 00:00:00 2001
From: DefTruth
Date: Wed, 4 Aug 2021 22:35:27 +0800
Subject: [PATCH] add GPU Compatibility for CUDAExecutionProvider (#10)

---
 README.md | 148 +++++++++++++++++++++++++++++++++++++++++-------------
 1 file changed, 112 insertions(+), 36 deletions(-)

diff --git a/README.md b/README.md
index 977fcc3c..5ab2262c 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@
-*Lite.AI* 🚀🚀🌟 is a simple and user-friendly C++ library of awesome 🔥🔥🔥 AI models. It's a collection of personal interests, such as YOLOX, YoloV5, YoloV4, DeepLabV3, ArcFace, etc. *Lite.AI* is based on *[onnxruntime c++](https://github.com/microsoft/onnxruntime)* by default. I do plan to reimplement it with *[ncnn](https://github.com/Tencent/ncnn)* and *[MNN](https://github.com/alibaba/MNN)*, but not soon. It includes [object detection](#lite.ai-object-detection), [face detection](#lite.ai-face-detection), [style transfer](#lite.ai-style-transfer), [face alignment](#lite.ai-face-alignment), [face recognition](#lite.ai-face-recognition), [segmentation](#lite.ai-segmentation), [colorization](#lite.ai-colorization), [face attributes analysis](#lite.ai-face-attributes-analysis), [image classification](#lite.ai-image-classification), [matting](#lite.ai-matting), etc. You can use these awesome models simply through the *lite::cv::Type::Class* syntax, such as *[lite::cv::detection::YoloV5](#lite.ai-object-detection)*. Have a good trip ~ 🙃🤪🤍
+*Lite.AI* 🚀🚀🌟 is a simple and user-friendly C++ library of awesome 🔥🔥🔥 AI models. It's a collection of personal interests, such as YOLOX, YoloV5, YoloV4, DeepLabV3, ArcFace, etc. *Lite.AI* is based on *[onnxruntime c++](https://github.com/microsoft/onnxruntime)* by default. I do plan to reimplement it with *[ncnn](https://github.com/Tencent/ncnn)* and *[MNN](https://github.com/alibaba/MNN)*, but not soon. It includes [object detection](#lite.ai-object-detection), [face detection](#lite.ai-face-detection), [style transfer](#lite.ai-style-transfer), [face alignment](#lite.ai-face-alignment), [face recognition](#lite.ai-face-recognition), [segmentation](#lite.ai-segmentation), [colorization](#lite.ai-colorization), [face attributes analysis](#lite.ai-face-attributes-analysis), [image classification](#lite.ai-image-classification), [matting](#lite.ai-matting), etc. You can use these awesome models simply through the *lite::cv::Type::Class* syntax, such as *[lite::cv::detection::YoloV5](#lite.ai-object-detection)* (see the usage sketch just below). Star 🌟👆🏻 this repo if it helps you ~ Have a good trip ~ 🙃🤪🤍

 ## Important Notes !!!

@@ -310,23 +310,6 @@ For example, ArcFace in [insightface](https://github.com/deepinsight/insightface
 |[SSDMobileNetV1](https://github.com/onnx/models/blob/master/vision/object_detection_segmentation/ssd-mobilenetv1)|27M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-object-detection) | *detection* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ssd_mobilenetv1.cpp) |
 |[YoloX](https://github.com/Megvii-BaseDetection/YOLOX)|3.5M| [YOLOX](https://github.com/Megvii-BaseDetection/YOLOX) | 🔥🔥new↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-object-detection) | *detection* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_yolox.cpp) |
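As a quick illustration of the *lite::cv::Type::Class* syntax referenced above, here is a minimal sketch for *lite::cv::detection::YoloV5*. The ONNX file name and image paths are placeholder assumptions, and the call pattern simply mirrors the UltraFace example later in this patch:

```c++
#include "lite/lite.h"

int main()
{
  // Placeholder paths: adjust to wherever your model and test image live.
  std::string onnx_path = "../../../hub/onnx/cv/yolov5s.onnx"; // assumed file name
  cv::Mat img_bgr = cv::imread("test.jpg");

  auto *yolov5 = new lite::cv::detection::YoloV5(onnx_path);

  std::vector<lite::cv::types::Boxf> detected_boxes;
  yolov5->detect(img_bgr, detected_boxes);                      // run inference
  lite::cv::utils::draw_boxes_inplace(img_bgr, detected_boxes); // draw results
  cv::imwrite("result.jpg", img_bgr);

  delete yolov5;
  return 0;
}
```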
-
- ⚠️ Expand More Details for Lite.AI's Model Zoo.
-
-* Classification.
-
-|Class|Size|From|Awesome|File|Type|State|Usage|
-|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
-|[EfficientNetLite4](https://github.com/onnx/models/blob/master/vision/classification/efficientnet-lite4)|49M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficientnet_lite4.cpp) |
-|[ShuffleNetV2](https://github.com/onnx/models/blob/master/vision/classification/shufflenet)|8.7M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_shufflenetv2.cpp) |
-|[DenseNet121](https://pytorch.org/hub/pytorch_vision_densenet/)|30.7M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_densenet.cpp) |
-|[GhostNet](https://pytorch.org/hub/pytorch_vision_ghostnet/)|20M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ghostnet.cpp) |
-|[HdrDNet](https://pytorch.org/hub/pytorch_vision_hardnet//)|13M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_hardnet.cpp) |
-|[IBNNet](https://pytorch.org/hub/pytorch_vision_ibnnet/)|97M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ibnnet.cpp) |
-|[MobileNetV2](https://pytorch.org/hub/pytorch_vision_mobilenet_v2/)|13M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_mobilenetv2.cpp) |
-|[ResNet](https://pytorch.org/hub/pytorch_vision_resnet/)|44M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_resnet.cpp) |
-|[ResNeXt](https://pytorch.org/hub/pytorch_vision_resnext/)|95M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_resnext.cpp) |
-

* Face Detection.

@@ -349,22 +332,6 @@ For example, ArcFace in [insightface](https://github.com/deepinsight/insightface

 |[FaceLandmark1000](https://github.com/Single430/FaceLandmark1000)|2.0M| [FaceLandm...](https://github.com/Single430/FaceLandmark1000) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-alignment) | *face::align* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_face_landmarks_1000.cpp) |

-* Face Attributes.
-
-|Class|Size|From|Awesome|File|Type|State|Usage|
-|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
-|[AgeGoogleNet](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|23M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_age_googlenet.cpp) |
-|[GenderGoogleNet](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|23M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_gender_googlenet.cpp) |
-|[EmotionFerPlus](https://github.com/onnx/models/blob/master/vision/body_analysis/emotion_ferplus)|33M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_emotion_ferplus.cpp) |
-|[VGG16Age](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|514M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_vgg16_age.cpp) |
-|[VGG16Gender](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|512M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_vgg16_gender.cpp) |
-|[SSRNet](https://github.com/oukohou/SSR_Net_Pytorch)|190K| [SSR_Net...](https://github.com/oukohou/SSR_Net_Pytorch) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ssrnet.cpp) |
-|[EfficientEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|15M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficient_emotion7.cpp) |
-|[EfficientEmotion8](https://github.com/HSE-asavchenko/face-emotion-recognition)|15M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficient_emotion8.cpp) |
-|[MobileEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|13M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_mobile_emotion7.cpp) |
-|[ReXNetEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|30M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_rexnet_emotion7.cpp) |
-
-

* Face Recognition.

|Class|Size|From|Awesome|File|Type|State|Usage|
|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
@@ -386,6 +353,9 @@ For example, ArcFace in [insightface](https://github.com/deepinsight/insightface
 |[CavaCombinedFace](https://github.com/cavalleria/cavaface.pytorch)| 250M | [cavaface...](https://github.com/cavalleria/cavaface.pytorch) | 🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-recognition) | *faceid* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_cava_combined_face.cpp) |
 |[MobileSEFocalFace](https://github.com/grib0ed0v/face_recognition.pytorch)|4.5M| [face_recog...](https://github.com/grib0ed0v/face_recognition.pytorch) | 🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-recognition) | *faceid* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_mobilese_focal_face.cpp) |
+
+
+ ⚠️ Expand More Details for Lite.AI's Model Zoo.
+

* Head Pose Estimation.

@@ -394,6 +364,37 @@ For example, ArcFace in [insightface](https://github.com/deepinsight/insightface

 |[FSANet](https://github.com/omasaht/headpose-fsanet-pytorch)|1.2M| [...fsanet...](https://github.com/omasaht/headpose-fsanet-pytorch) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-head-pose-estimation) | *face::pose* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_fsanet.cpp) |

+* Face Attributes.
+
+|Class|Size|From|Awesome|File|Type|State|Usage|
+|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
+|[AgeGoogleNet](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|23M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_age_googlenet.cpp) |
+|[GenderGoogleNet](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|23M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_gender_googlenet.cpp) |
+|[EmotionFerPlus](https://github.com/onnx/models/blob/master/vision/body_analysis/emotion_ferplus)|33M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_emotion_ferplus.cpp) |
+|[VGG16Age](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|514M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_vgg16_age.cpp) |
+|[VGG16Gender](https://github.com/onnx/models/tree/master/vision/body_analysis/age_gender)|512M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_vgg16_gender.cpp) |
+|[SSRNet](https://github.com/oukohou/SSR_Net_Pytorch)|190K| [SSR_Net...](https://github.com/oukohou/SSR_Net_Pytorch) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ssrnet.cpp) |
+|[EfficientEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|15M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficient_emotion7.cpp) |
+|[EfficientEmotion8](https://github.com/HSE-asavchenko/face-emotion-recognition)|15M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficient_emotion8.cpp) |
+|[MobileEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|13M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_mobile_emotion7.cpp) |
+|[ReXNetEmotion7](https://github.com/HSE-asavchenko/face-emotion-recognition)|30M| [face-emo...](https://github.com/HSE-asavchenko/face-emotion-recognition) | 🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-face-attributes) | *face::attr* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_rexnet_emotion7.cpp) |
+
+
+* Classification.
+
+|Class|Size|From|Awesome|File|Type|State|Usage|
+|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
+|[EfficientNetLite4](https://github.com/onnx/models/blob/master/vision/classification/efficientnet-lite4)|49M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_efficientnet_lite4.cpp) |
+|[ShuffleNetV2](https://github.com/onnx/models/blob/master/vision/classification/shufflenet)|8.7M| [onnx-models](https://github.com/onnx/models) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_shufflenetv2.cpp) |
+|[DenseNet121](https://pytorch.org/hub/pytorch_vision_densenet/)|30.7M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_densenet.cpp) |
+|[GhostNet](https://pytorch.org/hub/pytorch_vision_ghostnet/)|20M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ghostnet.cpp) |
+|[HarDNet](https://pytorch.org/hub/pytorch_vision_hardnet/)|13M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_hardnet.cpp) |
+|[IBNNet](https://pytorch.org/hub/pytorch_vision_ibnnet/)|97M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_ibnnet.cpp) |
+|[MobileNetV2](https://pytorch.org/hub/pytorch_vision_mobilenet_v2/)|13M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_mobilenetv2.cpp) |
+|[ResNet](https://pytorch.org/hub/pytorch_vision_resnet/)|44M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_resnet.cpp) |
+|[ResNeXt](https://pytorch.org/hub/pytorch_vision_resnext/)|95M| [torchvision](https://github.com/pytorch/vision) | 🔥🔥🔥↑ | [![](https://img.shields.io/badge/onnx-done-brightgreen.svg)](https://github.com/DefTruth/lite.ai/tree/main/docs/hub/lite.ai.hub.onnx.md#lite.ai.hub.onnx-classification) | *classification* | ✅ | [demo](https://github.com/DefTruth/lite.ai/blob/main/examples/lite/cv/test_lite_resnext.cpp) |
+
+
 * Segmentation.

 |Class|Size|From|Awesome|File|Type|State|Usage|
@@ -528,13 +529,88 @@ The output is:
-
+
+#### Example3: Face Recognition using [ArcFace](https://github.com/deepinsight/insightface/tree/master/recognition/arcface_torch). Download model from Model-Zoo[2](#lite.ai-2).
+
+```c++
+#include "lite/lite.h"
+
+static void test_default()
+{
+  std::string onnx_path = "../../../hub/onnx/cv/ms1mv3_arcface_r100.onnx";
+  std::string test_img_path0 = "../../../examples/lite/resources/test_lite_faceid_0.png";
+  std::string test_img_path1 = "../../../examples/lite/resources/test_lite_faceid_1.png";
+  std::string test_img_path2 = "../../../examples/lite/resources/test_lite_faceid_2.png";
+
+  auto *glint_arcface = new lite::cv::faceid::GlintArcFace(onnx_path);
+
+  lite::cv::types::FaceContent face_content0, face_content1, face_content2;
+  cv::Mat img_bgr0 = cv::imread(test_img_path0);
+  cv::Mat img_bgr1 = cv::imread(test_img_path1);
+  cv::Mat img_bgr2 = cv::imread(test_img_path2);
+  glint_arcface->detect(img_bgr0, face_content0);
+  glint_arcface->detect(img_bgr1, face_content1);
+  glint_arcface->detect(img_bgr2, face_content2);
+
+  if (face_content0.flag && face_content1.flag && face_content2.flag)
+  {
+    float sim01 = lite::cv::utils::math::cosine_similarity(
+        face_content0.embedding, face_content1.embedding);
+    float sim02 = lite::cv::utils::math::cosine_similarity(
+        face_content0.embedding, face_content2.embedding);
+    std::cout << "Detected Sim01: " << sim01 << " Sim02: " << sim02 << std::endl;
+  }
+
+  delete glint_arcface;
+}
+```
+
+The output is:
+ + + +
+ +> Detected Sim01: 0.721159 Sim02: -0.0626267 + +
+ +#### Example4: Face Detection using [UltraFace](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB). Download model from Model-Zoo[2](#lite.ai-2). +```c++ +#include "lite/lite.h" + +static void test_default() +{ + std::string onnx_path = "../../../hub/onnx/cv/ultraface-rfb-640.onnx"; + std::string test_img_path = "../../../examples/lite/resources/test_lite_ultraface.jpg"; + std::string save_img_path = "../../../logs/test_lite_ultraface.jpg"; + + auto *ultraface = new lite::cv::face::detect::UltraFace(onnx_path); + + std::vector detected_boxes; + cv::Mat img_bgr = cv::imread(test_img_path); + ultraface->detect(img_bgr, detected_boxes); + lite::cv::utils::draw_boxes_inplace(img_bgr, detected_boxes); + cv::imwrite(save_img_path, img_bgr); + + delete ultraface; +} +``` +The output is: +
*(The detected faces are drawn into the saved image at ../../../logs/test_lite_ultraface.jpg.)*
+ +
-
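Finally, since this patch's subject is GPU compatibility through the CUDAExecutionProvider, here is a generic onnxruntime C++ sketch of how that provider is typically registered. This shows standard onnxruntime usage, not necessarily the exact wiring inside Lite.AI, and the model path is a placeholder:

```c++
#include "onnxruntime_cxx_api.h"

// Build a session that prefers CUDA; any ops the CUDA provider cannot
// handle fall back to the default CPU execution provider.
static Ort::Session make_cuda_session(Ort::Env &env, const ORTCHAR_T *model_path)
{
  Ort::SessionOptions session_options;
  OrtCUDAProviderOptions cuda_options; // defaults target GPU device 0
  session_options.AppendExecutionProvider_CUDA(cuda_options);
  return Ort::Session(env, model_path, session_options);
}

// Usage sketch (model name is a placeholder):
// Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "lite.ai");
// Ort::Session session = make_cuda_session(env, ORT_TSTR("yolov5s.onnx"));
```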