From 761126b72659c349b6e9ca8383d64eea37d8e78f Mon Sep 17 00:00:00 2001 From: wangziyang <2890199310@qq.com> Date: Thu, 12 May 2022 20:03:58 +0800 Subject: [PATCH] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E6=BA=90=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/PaddleClas/ppcls/__init__.py | 20 + .../ppcls/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 255 bytes src/PaddleClas/ppcls/arch/__init__.py | 134 +++ .../arch/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 3525 bytes .../arch/__pycache__/utils.cpython-39.pyc | Bin 0 -> 1633 bytes .../ppcls/arch/backbone/__init__.py | 83 ++ .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 8785 bytes .../ppcls/arch/backbone/base/__init__.py | 0 .../base/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 176 bytes .../__pycache__/theseus_layer.cpython-39.pyc | Bin 0 -> 9546 bytes .../ppcls/arch/backbone/base/theseus_layer.py | 301 +++++ .../backbone/legendary_models/__init__.py | 6 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 1212 bytes .../__pycache__/esnet.cpython-39.pyc | Bin 0 -> 9847 bytes .../__pycache__/hrnet.cpython-39.pyc | Bin 0 -> 21144 bytes .../__pycache__/inception_v3.cpython-39.pyc | Bin 0 -> 11107 bytes .../__pycache__/mobilenet_v1.cpython-39.pyc | Bin 0 -> 7373 bytes .../__pycache__/mobilenet_v3.cpython-39.pyc | Bin 0 -> 15979 bytes .../__pycache__/pp_lcnet.cpython-39.pyc | Bin 0 -> 12099 bytes .../__pycache__/resnet.cpython-39.pyc | Bin 0 -> 15291 bytes .../__pycache__/vgg.cpython-39.pyc | Bin 0 -> 6815 bytes .../arch/backbone/legendary_models/esnet.py | 369 ++++++ .../arch/backbone/legendary_models/hrnet.py | 794 +++++++++++++ .../backbone/legendary_models/inception_v3.py | 557 +++++++++ .../backbone/legendary_models/mobilenet_v1.py | 257 +++++ .../backbone/legendary_models/mobilenet_v3.py | 586 ++++++++++ .../backbone/legendary_models/pp_lcnet.py | 419 +++++++ .../arch/backbone/legendary_models/resnet.py | 591 ++++++++++ 
.../arch/backbone/legendary_models/vgg.py | 259 +++++ .../ppcls/arch/backbone/model_zoo/__init__.py | 0 .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 181 bytes .../__pycache__/alexnet.cpython-39.pyc | Bin 0 -> 4140 bytes .../__pycache__/cspnet.cpython-39.pyc | Bin 0 -> 8361 bytes .../__pycache__/darknet.cpython-39.pyc | Bin 0 -> 5598 bytes .../__pycache__/densenet.cpython-39.pyc | Bin 0 -> 8535 bytes ...istilled_vision_transformer.cpython-39.pyc | Bin 0 -> 5765 bytes .../model_zoo/__pycache__/dla.cpython-39.pyc | Bin 0 -> 10922 bytes .../model_zoo/__pycache__/dpn.cpython-39.pyc | Bin 0 -> 9210 bytes .../__pycache__/efficientnet.cpython-39.pyc | Bin 0 -> 20490 bytes .../__pycache__/ghostnet.cpython-39.pyc | Bin 0 -> 8961 bytes .../__pycache__/googlenet.cpython-39.pyc | Bin 0 -> 6021 bytes .../model_zoo/__pycache__/gvt.cpython-39.pyc | Bin 0 -> 18183 bytes .../__pycache__/hardnet.cpython-39.pyc | Bin 0 -> 6308 bytes .../__pycache__/inception_v4.cpython-39.pyc | Bin 0 -> 11154 bytes .../__pycache__/levit.cpython-39.pyc | Bin 0 -> 13957 bytes .../__pycache__/mixnet.cpython-39.pyc | Bin 0 -> 20716 bytes .../__pycache__/mobilenet_v2.cpython-39.pyc | Bin 0 -> 7242 bytes .../__pycache__/pvt_v2.cpython-39.pyc | Bin 0 -> 12348 bytes .../__pycache__/rednet.cpython-39.pyc | Bin 0 -> 5398 bytes .../__pycache__/regnet.cpython-39.pyc | Bin 0 -> 11039 bytes .../__pycache__/repvgg.cpython-39.pyc | Bin 0 -> 9588 bytes .../__pycache__/res2net.cpython-39.pyc | Bin 0 -> 6570 bytes .../__pycache__/res2net_vd.cpython-39.pyc | Bin 0 -> 7226 bytes .../__pycache__/resnest.cpython-39.pyc | Bin 0 -> 12002 bytes .../__pycache__/resnet_vc.cpython-39.pyc | Bin 0 -> 6653 bytes .../__pycache__/resnext.cpython-39.pyc | Bin 0 -> 7599 bytes .../__pycache__/resnext101_wsl.cpython-39.pyc | Bin 0 -> 9446 bytes .../__pycache__/resnext_vd.cpython-39.pyc | Bin 0 -> 7580 bytes .../__pycache__/rexnet.cpython-39.pyc | Bin 0 -> 6879 bytes .../__pycache__/se_resnet_vd.cpython-39.pyc | Bin 0 -> 8714 bytes 
.../__pycache__/se_resnext.cpython-39.pyc | Bin 0 -> 8251 bytes .../__pycache__/se_resnext_vd.cpython-39.pyc | Bin 0 -> 7386 bytes .../__pycache__/shufflenet_v2.cpython-39.pyc | Bin 0 -> 8591 bytes .../__pycache__/squeezenet.cpython-39.pyc | Bin 0 -> 5014 bytes .../swin_transformer.cpython-39.pyc | Bin 0 -> 22819 bytes .../model_zoo/__pycache__/tnt.cpython-39.pyc | Bin 0 -> 9476 bytes .../vision_transformer.cpython-39.pyc | Bin 0 -> 11702 bytes .../__pycache__/xception.cpython-39.pyc | Bin 0 -> 10465 bytes .../xception_deeplab.cpython-39.pyc | Bin 0 -> 9044 bytes .../ppcls/arch/backbone/model_zoo/alexnet.py | 168 +++ .../ppcls/arch/backbone/model_zoo/cspnet.py | 376 +++++++ .../ppcls/arch/backbone/model_zoo/darknet.py | 197 ++++ .../ppcls/arch/backbone/model_zoo/densenet.py | 344 ++++++ .../model_zoo/distilled_vision_transformer.py | 272 +++++ .../ppcls/arch/backbone/model_zoo/dla.py | 528 +++++++++ .../ppcls/arch/backbone/model_zoo/dpn.py | 451 ++++++++ .../arch/backbone/model_zoo/efficientnet.py | 976 ++++++++++++++++ .../ppcls/arch/backbone/model_zoo/ghostnet.py | 363 ++++++ .../arch/backbone/model_zoo/googlenet.py | 229 ++++ .../ppcls/arch/backbone/model_zoo/gvt.py | 693 ++++++++++++ .../ppcls/arch/backbone/model_zoo/hardnet.py | 293 +++++ .../arch/backbone/model_zoo/inception_v4.py | 477 ++++++++ .../ppcls/arch/backbone/model_zoo/levit.py | 589 ++++++++++ .../ppcls/arch/backbone/model_zoo/mixnet.py | 815 ++++++++++++++ .../arch/backbone/model_zoo/mobilenet_v2.py | 287 +++++ .../ppcls/arch/backbone/model_zoo/pvt_v2.py | 492 ++++++++ .../ppcls/arch/backbone/model_zoo/rednet.py | 203 ++++ .../ppcls/arch/backbone/model_zoo/regnet.py | 431 +++++++ .../ppcls/arch/backbone/model_zoo/repvgg.py | 382 +++++++ .../ppcls/arch/backbone/model_zoo/res2net.py | 264 +++++ .../arch/backbone/model_zoo/res2net_vd.py | 305 +++++ .../ppcls/arch/backbone/model_zoo/resnest.py | 740 ++++++++++++ .../arch/backbone/model_zoo/resnet_vc.py | 309 +++++ 
.../ppcls/arch/backbone/model_zoo/resnext.py | 298 +++++ .../arch/backbone/model_zoo/resnext101_wsl.py | 490 ++++++++ .../arch/backbone/model_zoo/resnext_vd.py | 317 ++++++ .../ppcls/arch/backbone/model_zoo/rexnet.py | 281 +++++ .../arch/backbone/model_zoo/se_resnet_vd.py | 390 +++++++ .../arch/backbone/model_zoo/se_resnext.py | 364 ++++++ .../arch/backbone/model_zoo/se_resnext_vd.py | 309 +++++ .../arch/backbone/model_zoo/shufflenet_v2.py | 362 ++++++ .../arch/backbone/model_zoo/squeezenet.py | 194 ++++ .../backbone/model_zoo/swin_transformer.py | 857 ++++++++++++++ .../ppcls/arch/backbone/model_zoo/tnt.py | 386 +++++++ .../backbone/model_zoo/vision_transformer.py | 458 ++++++++ .../ppcls/arch/backbone/model_zoo/xception.py | 377 +++++++ .../backbone/model_zoo/xception_deeplab.py | 421 +++++++ .../arch/backbone/variant_models/__init__.py | 3 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 358 bytes .../pp_lcnet_variant.cpython-39.pyc | Bin 0 -> 1385 bytes .../__pycache__/resnet_variant.cpython-39.pyc | Bin 0 -> 1095 bytes .../__pycache__/vgg_variant.cpython-39.pyc | Bin 0 -> 1335 bytes .../variant_models/pp_lcnet_variant.py | 29 + .../backbone/variant_models/resnet_variant.py | 23 + .../backbone/variant_models/vgg_variant.py | 28 + src/PaddleClas/ppcls/arch/gears/__init__.py | 32 + .../gears/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 704 bytes .../__pycache__/arcmargin.cpython-39.pyc | Bin 0 -> 1940 bytes .../__pycache__/circlemargin.cpython-39.pyc | Bin 0 -> 1604 bytes .../__pycache__/cosmargin.cpython-39.pyc | Bin 0 -> 1449 bytes .../arch/gears/__pycache__/fc.cpython-39.pyc | Bin 0 -> 1003 bytes .../__pycache__/identity_head.cpython-39.pyc | Bin 0 -> 719 bytes .../__pycache__/vehicle_neck.cpython-39.pyc | Bin 0 -> 1181 bytes src/PaddleClas/ppcls/arch/gears/arcmargin.py | 72 ++ .../ppcls/arch/gears/circlemargin.py | 59 + src/PaddleClas/ppcls/arch/gears/cosmargin.py | 55 + src/PaddleClas/ppcls/arch/gears/fc.py | 35 + .../ppcls/arch/gears/identity_head.py 
| 9 + .../ppcls/arch/gears/vehicle_neck.py | 52 + src/PaddleClas/ppcls/arch/slim/__init__.py | 16 + .../slim/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 284 bytes .../slim/__pycache__/prune.cpython-39.pyc | Bin 0 -> 1673 bytes .../slim/__pycache__/quant.cpython-39.pyc | Bin 0 -> 1181 bytes src/PaddleClas/ppcls/arch/slim/prune.py | 65 ++ src/PaddleClas/ppcls/arch/slim/quant.py | 55 + src/PaddleClas/ppcls/arch/utils.py | 53 + .../Cartoonface/ResNet50_icartoon.yaml | 149 +++ .../GeneralRecognition_PPLCNet_x2_5.yaml | 148 +++ ...eneralRecognition_PPLCNet_x2_5_binary.yaml | 145 +++ .../GeneralRecognition_PPLCNet_x2_5_dml.yaml | 188 ++++ .../GeneralRecognition_PPLCNet_x2_5_udml.yaml | 193 ++++ .../configs/ImageNet/AlexNet/AlexNet.yaml | 129 +++ .../configs/ImageNet/CSPNet/CSPDarkNet53.yaml | 131 +++ .../ppcls/configs/ImageNet/DLA/DLA102.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA102x.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA102x2.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA169.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA34.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA46_c.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA46x_c.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA60.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA60x.yaml | 130 +++ .../ppcls/configs/ImageNet/DLA/DLA60x_c.yaml | 130 +++ .../ppcls/configs/ImageNet/DPN/DPN107.yaml | 130 +++ .../ppcls/configs/ImageNet/DPN/DPN131.yaml | 130 +++ .../ppcls/configs/ImageNet/DPN/DPN68.yaml | 130 +++ .../ppcls/configs/ImageNet/DPN/DPN92.yaml | 130 +++ .../ppcls/configs/ImageNet/DPN/DPN98.yaml | 130 +++ .../configs/ImageNet/DarkNet/DarkNet53.yaml | 130 +++ .../DataAugment/ResNet50_AutoAugment.yaml | 129 +++ .../DataAugment/ResNet50_Baseline.yaml | 128 +++ .../ImageNet/DataAugment/ResNet50_Cutmix.yaml | 128 +++ .../ImageNet/DataAugment/ResNet50_Cutout.yaml | 131 +++ .../DataAugment/ResNet50_GridMask.yaml | 134 +++ .../DataAugment/ResNet50_HideAndSeek.yaml | 129 +++ 
.../ImageNet/DataAugment/ResNet50_Mixup.yaml | 128 +++ .../DataAugment/ResNet50_RandAugment.yaml | 131 +++ .../DataAugment/ResNet50_RandomErasing.yaml | 134 +++ .../DeiT/DeiT_base_distilled_patch16_224.yaml | 156 +++ .../DeiT/DeiT_base_distilled_patch16_384.yaml | 156 +++ .../ImageNet/DeiT/DeiT_base_patch16_224.yaml | 156 +++ .../ImageNet/DeiT/DeiT_base_patch16_384.yaml | 156 +++ .../DeiT_small_distilled_patch16_224.yaml | 156 +++ .../ImageNet/DeiT/DeiT_small_patch16_224.yaml | 156 +++ .../DeiT/DeiT_tiny_distilled_patch16_224.yaml | 156 +++ .../ImageNet/DeiT/DeiT_tiny_patch16_224.yaml | 156 +++ .../ImageNet/DenseNet/DenseNet121.yaml | 130 +++ .../ImageNet/DenseNet/DenseNet161.yaml | 130 +++ .../ImageNet/DenseNet/DenseNet169.yaml | 130 +++ .../ImageNet/DenseNet/DenseNet201.yaml | 130 +++ .../ImageNet/DenseNet/DenseNet264.yaml | 130 +++ ...mv3_large_x1_0_distill_mv3_small_x1_0.yaml | 157 +++ .../configs/ImageNet/ESNet/ESNet_x0_25.yaml | 129 +++ .../configs/ImageNet/ESNet/ESNet_x0_5.yaml | 129 +++ .../configs/ImageNet/ESNet/ESNet_x0_75.yaml | 129 +++ .../configs/ImageNet/ESNet/ESNet_x1_0.yaml | 129 +++ .../ImageNet/EfficientNet/EfficientNetB0.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB1.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB2.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB3.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB4.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB5.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB6.yaml | 133 +++ .../ImageNet/EfficientNet/EfficientNetB7.yaml | 133 +++ .../ImageNet/GhostNet/GhostNet_x0_5.yaml | 130 +++ .../ImageNet/GhostNet/GhostNet_x1_0.yaml | 130 +++ .../ImageNet/GhostNet/GhostNet_x1_3.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W18_C.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W30_C.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W32_C.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W40_C.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W44_C.yaml | 130 +++ 
.../configs/ImageNet/HRNet/HRNet_W48_C.yaml | 130 +++ .../configs/ImageNet/HRNet/HRNet_W64_C.yaml | 130 +++ .../ImageNet/HarDNet/HarDNet39_ds.yaml | 130 +++ .../configs/ImageNet/HarDNet/HarDNet68.yaml | 130 +++ .../ImageNet/HarDNet/HarDNet68_ds.yaml | 130 +++ .../configs/ImageNet/HarDNet/HarDNet85.yaml | 130 +++ .../configs/ImageNet/Inception/GoogLeNet.yaml | 129 +++ .../ImageNet/Inception/InceptionV3.yaml | 130 +++ .../ImageNet/Inception/InceptionV4.yaml | 130 +++ .../configs/ImageNet/LeViT/LeViT_128.yaml | 130 +++ .../configs/ImageNet/LeViT/LeViT_128S.yaml | 130 +++ .../configs/ImageNet/LeViT/LeViT_192.yaml | 130 +++ .../configs/ImageNet/LeViT/LeViT_256.yaml | 130 +++ .../configs/ImageNet/LeViT/LeViT_384.yaml | 130 +++ .../configs/ImageNet/MixNet/MixNet_L.yaml | 132 +++ .../configs/ImageNet/MixNet/MixNet_M.yaml | 132 +++ .../configs/ImageNet/MixNet/MixNet_S.yaml | 132 +++ .../ImageNet/MobileNetV1/MobileNetV1.yaml | 132 +++ .../MobileNetV1/MobileNetV1_x0_25.yaml | 130 +++ .../MobileNetV1/MobileNetV1_x0_5.yaml | 130 +++ .../MobileNetV1/MobileNetV1_x0_75.yaml | 130 +++ .../ImageNet/MobileNetV2/MobileNetV2.yaml | 130 +++ .../MobileNetV2/MobileNetV2_x0_25.yaml | 128 +++ .../MobileNetV2/MobileNetV2_x0_5.yaml | 128 +++ .../MobileNetV2/MobileNetV2_x0_75.yaml | 128 +++ .../MobileNetV2/MobileNetV2_x1_5.yaml | 128 +++ .../MobileNetV2/MobileNetV2_x2_0.yaml | 128 +++ .../MobileNetV3/MobileNetV3_large_x0_35.yaml | 130 +++ .../MobileNetV3/MobileNetV3_large_x0_5.yaml | 130 +++ .../MobileNetV3/MobileNetV3_large_x0_75.yaml | 130 +++ .../MobileNetV3/MobileNetV3_large_x1_0.yaml | 131 +++ .../MobileNetV3/MobileNetV3_large_x1_25.yaml | 130 +++ .../MobileNetV3/MobileNetV3_small_x0_35.yaml | 130 +++ .../MobileNetV3/MobileNetV3_small_x0_5.yaml | 130 +++ .../MobileNetV3/MobileNetV3_small_x0_75.yaml | 130 +++ .../MobileNetV3/MobileNetV3_small_x1_0.yaml | 130 +++ .../MobileNetV3/MobileNetV3_small_x1_25.yaml | 130 +++ .../ImageNet/PPLCNet/PPLCNet_x0_25.yaml | 129 +++ 
.../ImageNet/PPLCNet/PPLCNet_x0_35.yaml | 129 +++ .../ImageNet/PPLCNet/PPLCNet_x0_5.yaml | 129 +++ .../ImageNet/PPLCNet/PPLCNet_x0_75.yaml | 129 +++ .../ImageNet/PPLCNet/PPLCNet_x1_0.yaml | 129 +++ .../ImageNet/PPLCNet/PPLCNet_x1_5.yaml | 129 +++ .../ImageNet/PPLCNet/PPLCNet_x2_0.yaml | 128 +++ .../ImageNet/PPLCNet/PPLCNet_x2_5.yaml | 130 +++ .../configs/ImageNet/PVTV2/PVT_V2_B0.yaml | 161 +++ .../configs/ImageNet/PVTV2/PVT_V2_B1.yaml | 161 +++ .../configs/ImageNet/PVTV2/PVT_V2_B2.yaml | 161 +++ .../ImageNet/PVTV2/PVT_V2_B2_Linear.yaml | 161 +++ .../configs/ImageNet/PVTV2/PVT_V2_B3.yaml | 162 +++ .../configs/ImageNet/PVTV2/PVT_V2_B4.yaml | 162 +++ .../configs/ImageNet/PVTV2/PVT_V2_B5.yaml | 162 +++ .../configs/ImageNet/ReXNet/ReXNet_1_0.yaml | 132 +++ .../configs/ImageNet/ReXNet/ReXNet_1_3.yaml | 132 +++ .../configs/ImageNet/ReXNet/ReXNet_1_5.yaml | 132 +++ .../configs/ImageNet/ReXNet/ReXNet_2_0.yaml | 132 +++ .../configs/ImageNet/ReXNet/ReXNet_3_0.yaml | 132 +++ .../configs/ImageNet/RedNet/RedNet101.yaml | 130 +++ .../configs/ImageNet/RedNet/RedNet152.yaml | 130 +++ .../configs/ImageNet/RedNet/RedNet26.yaml | 130 +++ .../configs/ImageNet/RedNet/RedNet38.yaml | 130 +++ .../configs/ImageNet/RedNet/RedNet50.yaml | 130 +++ .../Res2Net/Res2Net101_vd_26w_4s.yaml | 130 +++ .../Res2Net/Res2Net200_vd_26w_4s.yaml | 130 +++ .../ImageNet/Res2Net/Res2Net50_14w_8s.yaml | 130 +++ .../ImageNet/Res2Net/Res2Net50_26w_4s.yaml | 130 +++ .../ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml | 130 +++ .../configs/ImageNet/ResNeSt/ResNeSt101.yaml | 131 +++ .../configs/ImageNet/ResNeSt/ResNeSt50.yaml | 131 +++ .../ResNeSt/ResNeSt50_fast_1s1x64d.yaml | 131 +++ .../ImageNet/ResNeXt/ResNeXt101_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt101_64x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt152_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt152_64x4d.yaml | 130 +++ 
.../ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt50_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt50_64x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml | 130 +++ .../ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml | 130 +++ .../ResNeXt101_wsl/ResNeXt101_32x16d_wsl.yaml | 130 +++ .../ResNeXt101_wsl/ResNeXt101_32x32d_wsl.yaml | 130 +++ .../ResNeXt101_wsl/ResNeXt101_32x48d_wsl.yaml | 130 +++ .../ResNeXt101_wsl/ResNeXt101_32x8d_wsl.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet101.yaml | 132 +++ .../configs/ImageNet/ResNet/ResNet101_vd.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet152.yaml | 132 +++ .../configs/ImageNet/ResNet/ResNet152_vd.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet18.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet18_vd.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet200_vd.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet34.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet34_vd.yaml | 130 +++ .../configs/ImageNet/ResNet/ResNet50.yaml | 132 +++ .../ImageNet/ResNet/ResNet50_amp_O1.yaml | 147 +++ .../ImageNet/ResNet/ResNet50_amp_O2.yaml | 149 +++ .../configs/ImageNet/ResNet/ResNet50_vd.yaml | 130 +++ .../configs/ImageNet/SENet/SENet154_vd.yaml | 130 +++ .../ImageNet/SENet/SE_ResNeXt101_32x4d.yaml | 130 +++ .../SENet/SE_ResNeXt101_32x4d_amp_O2.yaml | 143 +++ .../ImageNet/SENet/SE_ResNeXt50_32x4d.yaml | 130 +++ .../ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml | 130 +++ .../ImageNet/SENet/SE_ResNet18_vd.yaml | 130 +++ .../ImageNet/SENet/SE_ResNet34_vd.yaml | 130 +++ .../ImageNet/SENet/SE_ResNet50_vd.yaml | 130 +++ .../ShuffleNet/ShuffleNetV2_swish.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x0_25.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x0_33.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x0_5.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x1_0.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x1_5.yaml | 129 +++ .../ShuffleNet/ShuffleNetV2_x2_0.yaml | 129 
+++ .../ImageNet/SqueezeNet/SqueezeNet1_0.yaml | 128 +++ .../ImageNet/SqueezeNet/SqueezeNet1_1.yaml | 128 +++ ...nTransformer_base_patch4_window12_384.yaml | 159 +++ ...inTransformer_base_patch4_window7_224.yaml | 159 +++ ...Transformer_large_patch4_window12_384.yaml | 159 +++ ...nTransformer_large_patch4_window7_224.yaml | 159 +++ ...nTransformer_small_patch4_window7_224.yaml | 159 +++ ...inTransformer_tiny_patch4_window7_224.yaml | 159 +++ .../ppcls/configs/ImageNet/TNT/TNT_small.yaml | 130 +++ .../configs/ImageNet/Twins/alt_gvt_base.yaml | 161 +++ .../configs/ImageNet/Twins/alt_gvt_large.yaml | 161 +++ .../configs/ImageNet/Twins/alt_gvt_small.yaml | 161 +++ .../configs/ImageNet/Twins/pcpvt_base.yaml | 161 +++ .../configs/ImageNet/Twins/pcpvt_large.yaml | 161 +++ .../configs/ImageNet/Twins/pcpvt_small.yaml | 161 +++ .../ppcls/configs/ImageNet/VGG/VGG11.yaml | 128 +++ .../ppcls/configs/ImageNet/VGG/VGG13.yaml | 128 +++ .../ppcls/configs/ImageNet/VGG/VGG16.yaml | 128 +++ .../ppcls/configs/ImageNet/VGG/VGG19.yaml | 128 +++ .../ViT_base_patch16_224.yaml | 130 +++ .../ViT_base_patch16_384.yaml | 130 +++ .../ViT_base_patch32_384.yaml | 130 +++ .../ViT_large_patch16_224.yaml | 130 +++ .../ViT_large_patch16_384.yaml | 130 +++ .../ViT_large_patch32_384.yaml | 130 +++ .../ViT_small_patch16_224.yaml | 130 +++ .../configs/ImageNet/Xception/Xception41.yaml | 129 +++ .../ImageNet/Xception/Xception41_deeplab.yaml | 129 +++ .../configs/ImageNet/Xception/Xception65.yaml | 130 +++ .../ImageNet/Xception/Xception65_deeplab.yaml | 129 +++ .../configs/ImageNet/Xception/Xception71.yaml | 130 +++ .../ppcls/configs/Logo/ResNet50_ReID.yaml | 151 +++ .../MV3_Large_1x_Aliproduct_DLBHC.yaml | 149 +++ .../Products/ResNet50_vd_Aliproduct.yaml | 119 ++ .../configs/Products/ResNet50_vd_Inshop.yaml | 157 +++ .../configs/Products/ResNet50_vd_SOP.yaml | 156 +++ .../configs/Vehicle/PPLCNet_2.5x_ReID.yaml | 158 +++ .../ppcls/configs/Vehicle/ResNet50.yaml | 130 +++ 
.../ppcls/configs/Vehicle/ResNet50_ReID.yaml | 155 +++ .../quick_start/MobileNetV1_retrieval.yaml | 158 +++ .../quick_start/MobileNetV3_large_x1_0.yaml | 130 +++ .../configs/quick_start/ResNet50_vd.yaml | 129 +++ .../kunlun/HRNet_W18_C_finetune_kunlun.yaml | 68 ++ .../kunlun/ResNet50_vd_finetune_kunlun.yaml | 69 ++ .../kunlun/VGG16_finetune_kunlun.yaml | 70 ++ .../kunlun/VGG19_finetune_kunlun.yaml | 70 ++ .../new_user/ShuffleNetV2_x0_25.yaml | 129 +++ .../professional/MobileNetV1_multilabel.yaml | 129 +++ ...ileNetV3_large_x1_0_CIFAR100_finetune.yaml | 127 +++ ...50_vd_distill_MV3_large_x1_0_CIFAR100.yaml | 151 +++ .../professional/ResNet50_vd_CIFAR100.yaml | 127 +++ .../ResNet50_vd_mixup_CIFAR100_finetune.yaml | 127 +++ .../professional/VGG19_CIFAR10_DeepHash.yaml | 147 +++ ...Recognition_PPLCNet_x2_5_quantization.yaml | 154 +++ .../slim/MobileNetV3_large_x1_0_prune.yaml | 139 +++ .../MobileNetV3_large_x1_0_quantization.yaml | 138 +++ .../slim/PPLCNet_x1_0_quantization.yaml | 138 +++ .../ppcls/configs/slim/ResNet50_vd_prune.yaml | 138 +++ .../slim/ResNet50_vd_quantization.yaml | 137 +++ .../slim/ResNet50_vehicle_cls_prune.yaml | 135 +++ .../ResNet50_vehicle_cls_quantization.yaml | 134 +++ .../slim/ResNet50_vehicle_reid_prune.yaml | 162 +++ .../ResNet50_vehicle_reid_quantization.yaml | 161 +++ src/PaddleClas/ppcls/data/__init__.py | 144 +++ .../data/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 3831 bytes .../data/dataloader/.imagenet_dataset.py.un~ | Bin 0 -> 523 bytes .../DistributedRandomIdentitySampler.py | 90 ++ .../ppcls/data/dataloader/__init__.py | 9 + ...ibutedRandomIdentitySampler.cpython-39.pyc | Bin 0 -> 2513 bytes .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 849 bytes .../__pycache__/common_dataset.cpython-39.pyc | Bin 0 -> 2415 bytes .../icartoon_dataset.cpython-39.pyc | Bin 0 -> 983 bytes .../imagenet_dataset.cpython-39.pyc | Bin 0 -> 1043 bytes .../__pycache__/logo_dataset.cpython-39.pyc | Bin 0 -> 1173 bytes 
.../__pycache__/mix_dataset.cpython-39.pyc | Bin 0 -> 1413 bytes .../__pycache__/mix_sampler.cpython-39.pyc | Bin 0 -> 2364 bytes .../multilabel_dataset.cpython-39.pyc | Bin 0 -> 1885 bytes .../__pycache__/pk_sampler.cpython-39.pyc | Bin 0 -> 2993 bytes .../vehicle_dataset.cpython-39.pyc | Bin 0 -> 4242 bytes .../ppcls/data/dataloader/common_dataset.py | 84 ++ src/PaddleClas/ppcls/data/dataloader/dali.py | 319 ++++++ .../ppcls/data/dataloader/icartoon_dataset.py | 36 + .../ppcls/data/dataloader/imagenet_dataset.py | 38 + .../ppcls/data/dataloader/logo_dataset.py | 46 + .../ppcls/data/dataloader/mix_dataset.py | 49 + .../ppcls/data/dataloader/mix_sampler.py | 79 ++ .../data/dataloader/multilabel_dataset.py | 59 + .../ppcls/data/dataloader/pk_sampler.py | 105 ++ .../ppcls/data/dataloader/vehicle_dataset.py | 138 +++ .../ppcls/data/postprocess/__init__.py | 41 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 1257 bytes .../__pycache__/topk.cpython-39.pyc | Bin 0 -> 2638 bytes src/PaddleClas/ppcls/data/postprocess/topk.py | 85 ++ .../ppcls/data/preprocess/__init__.py | 100 ++ .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 3132 bytes .../data/preprocess/batch_ops/__init__.py | 1 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 183 bytes .../batch_operators.cpython-39.pyc | Bin 0 -> 7072 bytes .../preprocess/batch_ops/batch_operators.py | 231 ++++ .../ppcls/data/preprocess/ops/__init__.py | 1 + .../ops/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 177 bytes .../__pycache__/autoaugment.cpython-39.pyc | Bin 0 -> 8715 bytes .../ops/__pycache__/cutout.cpython-39.pyc | Bin 0 -> 1013 bytes .../ops/__pycache__/fmix.cpython-39.pyc | Bin 0 -> 6614 bytes .../ops/__pycache__/functional.cpython-39.pyc | Bin 0 -> 3856 bytes .../ops/__pycache__/grid.cpython-39.pyc | Bin 0 -> 2111 bytes .../__pycache__/hide_and_seek.cpython-39.pyc | Bin 0 -> 962 bytes .../ops/__pycache__/operators.cpython-39.pyc | Bin 0 -> 11291 bytes .../__pycache__/randaugment.cpython-39.pyc | Bin 0 -> 
3628 bytes .../__pycache__/random_erasing.cpython-39.pyc | Bin 0 -> 2404 bytes .../timm_autoaugment.cpython-39.pyc | Bin 0 -> 23847 bytes .../ppcls/data/preprocess/ops/autoaugment.py | 264 +++++ .../ppcls/data/preprocess/ops/cutout.py | 41 + .../ppcls/data/preprocess/ops/fmix.py | 217 ++++ .../ppcls/data/preprocess/ops/functional.py | 138 +++ .../ppcls/data/preprocess/ops/grid.py | 89 ++ .../data/preprocess/ops/hide_and_seek.py | 44 + .../ppcls/data/preprocess/ops/operators.py | 384 +++++++ .../ppcls/data/preprocess/ops/randaugment.py | 106 ++ .../data/preprocess/ops/random_erasing.py | 90 ++ .../data/preprocess/ops/timm_autoaugment.py | 877 +++++++++++++++ src/PaddleClas/ppcls/data/utils/__init__.py | 13 + .../utils/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 168 bytes .../__pycache__/get_image_list.cpython-39.pyc | Bin 0 -> 1192 bytes .../ppcls/data/utils/get_image_list.py | 49 + src/PaddleClas/ppcls/engine/__init__.py | 0 .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 164 bytes .../engine/__pycache__/engine.cpython-39.pyc | Bin 0 -> 11341 bytes src/PaddleClas/ppcls/engine/engine.py | 465 ++++++++ .../ppcls/engine/evaluation/__init__.py | 16 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 329 bytes .../__pycache__/classification.cpython-39.pyc | Bin 0 -> 3526 bytes .../__pycache__/retrieval.cpython-39.pyc | Bin 0 -> 3796 bytes .../ppcls/engine/evaluation/classification.py | 169 +++ .../ppcls/engine/evaluation/retrieval.py | 171 +++ src/PaddleClas/ppcls/engine/train/__init__.py | 14 + .../train/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 230 bytes .../train/__pycache__/train.cpython-39.pyc | Bin 0 -> 2135 bytes .../train/__pycache__/utils.cpython-39.pyc | Bin 0 -> 2123 bytes src/PaddleClas/ppcls/engine/train/train.py | 83 ++ src/PaddleClas/ppcls/engine/train/utils.py | 72 ++ src/PaddleClas/ppcls/loss/__init__.py | 67 ++ .../loss/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 2773 bytes .../loss/__pycache__/celoss.cpython-39.pyc | Bin 0 -> 1971 bytes 
.../__pycache__/centerloss.cpython-39.pyc | Bin 0 -> 1767 bytes .../loss/__pycache__/comfunc.cpython-39.pyc | Bin 0 -> 810 bytes .../__pycache__/deephashloss.cpython-39.pyc | Bin 0 -> 2943 bytes .../__pycache__/distanceloss.cpython-39.pyc | Bin 0 -> 1175 bytes .../distillationloss.cpython-39.pyc | Bin 0 -> 4598 bytes .../loss/__pycache__/dmlloss.cpython-39.pyc | Bin 0 -> 1306 bytes .../loss/__pycache__/emlloss.cpython-39.pyc | Bin 0 -> 2665 bytes .../__pycache__/googlenetloss.cpython-39.pyc | Bin 0 -> 1226 bytes .../loss/__pycache__/msmloss.cpython-39.pyc | Bin 0 -> 2133 bytes .../__pycache__/multilabelloss.cpython-39.pyc | Bin 0 -> 1641 bytes .../__pycache__/npairsloss.cpython-39.pyc | Bin 0 -> 1483 bytes .../pairwisecosface.cpython-39.pyc | Bin 0 -> 1549 bytes .../loss/__pycache__/rkdloss.cpython-39.pyc | Bin 0 -> 2366 bytes .../__pycache__/supconloss.cpython-39.pyc | Bin 0 -> 3067 bytes .../__pycache__/trihardloss.cpython-39.pyc | Bin 0 -> 2210 bytes .../loss/__pycache__/triplet.cpython-39.pyc | Bin 0 -> 3757 bytes src/PaddleClas/ppcls/loss/celoss.py | 67 ++ src/PaddleClas/ppcls/loss/centerloss.py | 54 + src/PaddleClas/ppcls/loss/comfunc.py | 45 + src/PaddleClas/ppcls/loss/deephashloss.py | 92 ++ src/PaddleClas/ppcls/loss/distanceloss.py | 43 + src/PaddleClas/ppcls/loss/distillationloss.py | 174 +++ src/PaddleClas/ppcls/loss/dmlloss.py | 50 + src/PaddleClas/ppcls/loss/emlloss.py | 97 ++ src/PaddleClas/ppcls/loss/googlenetloss.py | 41 + src/PaddleClas/ppcls/loss/msmloss.py | 78 ++ src/PaddleClas/ppcls/loss/multilabelloss.py | 43 + src/PaddleClas/ppcls/loss/npairsloss.py | 38 + src/PaddleClas/ppcls/loss/pairwisecosface.py | 55 + src/PaddleClas/ppcls/loss/rkdloss.py | 97 ++ src/PaddleClas/ppcls/loss/supconloss.py | 108 ++ src/PaddleClas/ppcls/loss/trihardloss.py | 82 ++ src/PaddleClas/ppcls/loss/triplet.py | 137 +++ src/PaddleClas/ppcls/metric/__init__.py | 51 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 1619 bytes 
.../metric/__pycache__/metrics.cpython-39.pyc | Bin 0 -> 9059 bytes src/PaddleClas/ppcls/metric/metrics.py | 309 +++++ src/PaddleClas/ppcls/optimizer/__init__.py | 72 ++ .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 1711 bytes .../__pycache__/learning_rate.cpython-39.pyc | Bin 0 -> 11382 bytes .../__pycache__/optimizer.cpython-39.pyc | Bin 0 -> 6311 bytes .../ppcls/optimizer/learning_rate.py | 326 ++++++ src/PaddleClas/ppcls/optimizer/optimizer.py | 217 ++++ src/PaddleClas/ppcls/static/program.py | 449 ++++++++ src/PaddleClas/ppcls/static/run_dali.sh | 8 + src/PaddleClas/ppcls/static/save_load.py | 139 +++ src/PaddleClas/ppcls/static/train.py | 209 ++++ src/PaddleClas/ppcls/utils/__init__.py | 27 + .../utils/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 630 bytes .../utils/__pycache__/check.cpython-39.pyc | Bin 0 -> 3952 bytes .../utils/__pycache__/config.cpython-39.pyc | Bin 0 -> 5579 bytes .../utils/__pycache__/download.cpython-39.pyc | Bin 0 -> 7296 bytes .../utils/__pycache__/logger.cpython-39.pyc | Bin 0 -> 4334 bytes .../utils/__pycache__/metrics.cpython-39.pyc | Bin 0 -> 2714 bytes .../utils/__pycache__/misc.cpython-39.pyc | Bin 0 -> 1981 bytes .../__pycache__/model_zoo.cpython-39.pyc | Bin 0 -> 5603 bytes .../utils/__pycache__/profiler.cpython-39.pyc | Bin 0 -> 3289 bytes .../__pycache__/save_load.cpython-39.pyc | Bin 0 -> 3575 bytes src/PaddleClas/ppcls/utils/check.py | 151 +++ src/PaddleClas/ppcls/utils/config.py | 210 ++++ src/PaddleClas/ppcls/utils/download.py | 319 ++++++ src/PaddleClas/ppcls/utils/ema.py | 63 ++ .../feature_maps_visualization/fm_vis.py | 97 ++ .../feature_maps_visualization/resnet.py | 535 +++++++++ .../utils/feature_maps_visualization/utils.py | 85 ++ src/PaddleClas/ppcls/utils/gallery2fc.py | 119 ++ .../ppcls/utils/imagenet1k_label_list.txt | 1000 +++++++++++++++++ src/PaddleClas/ppcls/utils/logger.py | 137 +++ src/PaddleClas/ppcls/utils/metrics.py | 107 ++ src/PaddleClas/ppcls/utils/misc.py | 63 ++ 
src/PaddleClas/ppcls/utils/model_zoo.py | 213 ++++ src/PaddleClas/ppcls/utils/pretrained.list | 121 ++ src/PaddleClas/ppcls/utils/profiler.py | 111 ++ src/PaddleClas/ppcls/utils/save_load.py | 136 +++ 534 files changed, 64967 insertions(+) create mode 100644 src/PaddleClas/ppcls/__init__.py create mode 100644 src/PaddleClas/ppcls/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/__pycache__/utils.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/base/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/base/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/base/__pycache__/theseus_layer.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/base/theseus_layer.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/esnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/hrnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/inception_v3.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/mobilenet_v1.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/mobilenet_v3.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/pp_lcnet.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/resnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/vgg.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/esnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/hrnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/inception_v3.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v1.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v3.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/pp_lcnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/resnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/legendary_models/vgg.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/alexnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/cspnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/darknet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/densenet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/distilled_vision_transformer.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dla.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dpn.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/efficientnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/ghostnet.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/googlenet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/gvt.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/hardnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/inception_v4.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/levit.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/mixnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/mobilenet_v2.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/pvt_v2.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/rednet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/regnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/repvgg.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/res2net.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/res2net_vd.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/resnest.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/resnet_vc.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/resnext.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/resnext101_wsl.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/resnext_vd.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/rexnet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/se_resnet_vd.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/se_resnext.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/se_resnext_vd.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/shufflenet_v2.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/squeezenet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/swin_transformer.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/tnt.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/vision_transformer.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/xception.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/xception_deeplab.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/alexnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/cspnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/darknet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/densenet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/distilled_vision_transformer.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/dla.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/dpn.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/efficientnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/ghostnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/googlenet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/gvt.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/hardnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/inception_v4.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/levit.py create mode 
100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/mixnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/mobilenet_v2.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/pvt_v2.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/rednet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/regnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/repvgg.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/res2net.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/res2net_vd.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/resnest.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/resnet_vc.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/resnext.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/resnext101_wsl.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/resnext_vd.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/rexnet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/se_resnet_vd.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/se_resnext.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/se_resnext_vd.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/shufflenet_v2.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/squeezenet.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/swin_transformer.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/tnt.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/vision_transformer.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/xception.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/model_zoo/xception_deeplab.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/__init__.py create mode 100644 
src/PaddleClas/ppcls/arch/backbone/variant_models/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/__pycache__/pp_lcnet_variant.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/__pycache__/resnet_variant.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/__pycache__/vgg_variant.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/pp_lcnet_variant.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/resnet_variant.py create mode 100644 src/PaddleClas/ppcls/arch/backbone/variant_models/vgg_variant.py create mode 100644 src/PaddleClas/ppcls/arch/gears/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/arcmargin.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/circlemargin.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/cosmargin.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/fc.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/identity_head.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/__pycache__/vehicle_neck.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/gears/arcmargin.py create mode 100644 src/PaddleClas/ppcls/arch/gears/circlemargin.py create mode 100644 src/PaddleClas/ppcls/arch/gears/cosmargin.py create mode 100644 src/PaddleClas/ppcls/arch/gears/fc.py create mode 100644 src/PaddleClas/ppcls/arch/gears/identity_head.py create mode 100644 src/PaddleClas/ppcls/arch/gears/vehicle_neck.py create mode 100644 src/PaddleClas/ppcls/arch/slim/__init__.py create mode 100644 src/PaddleClas/ppcls/arch/slim/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/slim/__pycache__/prune.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/arch/slim/__pycache__/quant.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/arch/slim/prune.py create mode 100644 src/PaddleClas/ppcls/arch/slim/quant.py create mode 100644 src/PaddleClas/ppcls/arch/utils.py create mode 100644 src/PaddleClas/ppcls/configs/Cartoonface/ResNet50_icartoon.yaml create mode 100644 src/PaddleClas/ppcls/configs/GeneralRecognition/GeneralRecognition_PPLCNet_x2_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/GeneralRecognition/GeneralRecognition_PPLCNet_x2_5_binary.yaml create mode 100644 src/PaddleClas/ppcls/configs/GeneralRecognition/GeneralRecognition_PPLCNet_x2_5_dml.yaml create mode 100644 src/PaddleClas/ppcls/configs/GeneralRecognition/GeneralRecognition_PPLCNet_x2_5_udml.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/AlexNet/AlexNet.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/CSPNet/CSPDarkNet53.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA102.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA102x.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA102x2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA169.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA34.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA46_c.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA46x_c.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA60.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA60x.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DLA/DLA60x_c.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DPN/DPN107.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DPN/DPN131.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DPN/DPN68.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DPN/DPN92.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/DPN/DPN98.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DarkNet/DarkNet53.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_AutoAugment.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_Baseline.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutmix.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_Cutout.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_GridMask.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_HideAndSeek.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_Mixup.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_RandAugment.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DataAugment/ResNet50_RandomErasing.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_base_distilled_patch16_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_base_patch16_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_small_distilled_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_small_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_tiny_distilled_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DeiT/DeiT_tiny_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DenseNet/DenseNet121.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DenseNet/DenseNet161.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DenseNet/DenseNet169.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/DenseNet/DenseNet201.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/DenseNet/DenseNet264.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Distillation/mv3_large_x1_0_distill_mv3_small_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ESNet/ESNet_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ESNet/ESNet_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ESNet/ESNet_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ESNet/ESNet_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB1.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB3.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB4.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB6.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/EfficientNet/EfficientNetB7.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/GhostNet/GhostNet_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/GhostNet/GhostNet_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/GhostNet/GhostNet_x1_3.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W18_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W30_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W32_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W40_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W44_C.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W48_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HRNet/HRNet_W64_C.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HarDNet/HarDNet39_ds.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HarDNet/HarDNet68.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HarDNet/HarDNet68_ds.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/HarDNet/HarDNet85.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Inception/GoogLeNet.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Inception/InceptionV3.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Inception/InceptionV4.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/LeViT/LeViT_128.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/LeViT/LeViT_128S.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/LeViT/LeViT_192.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/LeViT/LeViT_256.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/LeViT/LeViT_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MixNet/MixNet_L.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MixNet/MixNet_M.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MixNet/MixNet_S.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV1/MobileNetV1.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV1/MobileNetV1_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV1/MobileNetV1_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV1/MobileNetV1_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2_x0_5.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2_x1_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV2/MobileNetV2_x2_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_large_x0_35.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_large_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_large_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_large_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_large_x1_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_small_x0_35.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_small_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_small_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_small_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/MobileNetV3/MobileNetV3_small_x1_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x0_35.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x0_75.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x1_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x2_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PPLCNet/PPLCNet_x2_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B0.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B1.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B2_Linear.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B3.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B4.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/PVTV2/PVT_V2_B5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ReXNet/ReXNet_1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ReXNet/ReXNet_1_3.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ReXNet/ReXNet_1_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ReXNet/ReXNet_2_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ReXNet/ReXNet_3_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/RedNet/RedNet101.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/RedNet/RedNet152.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/RedNet/RedNet26.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/RedNet/RedNet38.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/RedNet/RedNet50.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Res2Net/Res2Net101_vd_26w_4s.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Res2Net/Res2Net200_vd_26w_4s.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Res2Net/Res2Net50_14w_8s.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Res2Net/Res2Net50_26w_4s.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Res2Net/Res2Net50_vd_26w_4s.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeSt/ResNeSt101.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeSt/ResNeSt50.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeSt/ResNeSt50_fast_1s1x64d.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt101_vd_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt152_vd_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt/ResNeXt50_vd_64x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt101_wsl/ResNeXt101_32x16d_wsl.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt101_wsl/ResNeXt101_32x32d_wsl.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt101_wsl/ResNeXt101_32x48d_wsl.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNeXt101_wsl/ResNeXt101_32x8d_wsl.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet101.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet101_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet152.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet152_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet18.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet18_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet200_vd.yaml 
create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet34.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet34_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet50.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet50_amp_O1.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet50_amp_O2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ResNet/ResNet50_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SENet154_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNeXt101_32x4d_amp_O2.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNeXt50_vd_32x4d.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNet18_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNet34_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SENet/SE_ResNet50_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_swish.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x0_33.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x0_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x1_5.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/ShuffleNet/ShuffleNetV2_x2_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SqueezeNet/SqueezeNet1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SqueezeNet/SqueezeNet1_1.yaml create 
mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window12_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_base_patch4_window7_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window12_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_large_patch4_window7_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_small_patch4_window7_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/SwinTransformer/SwinTransformer_tiny_patch4_window7_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/TNT/TNT_small.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/alt_gvt_base.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/alt_gvt_large.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/alt_gvt_small.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/pcpvt_base.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/pcpvt_large.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Twins/pcpvt_small.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VGG/VGG11.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VGG/VGG13.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VGG/VGG16.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VGG/VGG19.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_base_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_base_patch16_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_base_patch32_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_large_patch16_224.yaml create mode 100644 
src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_large_patch16_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_large_patch32_384.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/VisionTransformer/ViT_small_patch16_224.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Xception/Xception41.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Xception/Xception41_deeplab.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Xception/Xception65.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Xception/Xception65_deeplab.yaml create mode 100644 src/PaddleClas/ppcls/configs/ImageNet/Xception/Xception71.yaml create mode 100644 src/PaddleClas/ppcls/configs/Logo/ResNet50_ReID.yaml create mode 100644 src/PaddleClas/ppcls/configs/Products/MV3_Large_1x_Aliproduct_DLBHC.yaml create mode 100644 src/PaddleClas/ppcls/configs/Products/ResNet50_vd_Aliproduct.yaml create mode 100644 src/PaddleClas/ppcls/configs/Products/ResNet50_vd_Inshop.yaml create mode 100644 src/PaddleClas/ppcls/configs/Products/ResNet50_vd_SOP.yaml create mode 100644 src/PaddleClas/ppcls/configs/Vehicle/PPLCNet_2.5x_ReID.yaml create mode 100644 src/PaddleClas/ppcls/configs/Vehicle/ResNet50.yaml create mode 100644 src/PaddleClas/ppcls/configs/Vehicle/ResNet50_ReID.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/MobileNetV1_retrieval.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/MobileNetV3_large_x1_0.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/ResNet50_vd.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/kunlun/HRNet_W18_C_finetune_kunlun.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/kunlun/ResNet50_vd_finetune_kunlun.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/kunlun/VGG16_finetune_kunlun.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/kunlun/VGG19_finetune_kunlun.yaml create mode 
100644 src/PaddleClas/ppcls/configs/quick_start/new_user/ShuffleNetV2_x0_25.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/MobileNetV1_multilabel.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/MobileNetV3_large_x1_0_CIFAR100_finetune.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/R50_vd_distill_MV3_large_x1_0_CIFAR100.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/ResNet50_vd_CIFAR100.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/ResNet50_vd_mixup_CIFAR100_finetune.yaml create mode 100644 src/PaddleClas/ppcls/configs/quick_start/professional/VGG19_CIFAR10_DeepHash.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/GeneralRecognition_PPLCNet_x2_5_quantization.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/MobileNetV3_large_x1_0_prune.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/MobileNetV3_large_x1_0_quantization.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/PPLCNet_x1_0_quantization.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vd_prune.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vd_quantization.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vehicle_cls_prune.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vehicle_cls_quantization.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vehicle_reid_prune.yaml create mode 100644 src/PaddleClas/ppcls/configs/slim/ResNet50_vehicle_reid_quantization.yaml create mode 100644 src/PaddleClas/ppcls/data/__init__.py create mode 100644 src/PaddleClas/ppcls/data/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/.imagenet_dataset.py.un~ create mode 100644 src/PaddleClas/ppcls/data/dataloader/DistributedRandomIdentitySampler.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/__init__.py 
create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/DistributedRandomIdentitySampler.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/common_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/icartoon_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/imagenet_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/logo_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/mix_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/mix_sampler.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/multilabel_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/pk_sampler.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/__pycache__/vehicle_dataset.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/dataloader/common_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/dali.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/icartoon_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/imagenet_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/logo_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/mix_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/mix_sampler.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/multilabel_dataset.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/pk_sampler.py create mode 100644 src/PaddleClas/ppcls/data/dataloader/vehicle_dataset.py create mode 100644 src/PaddleClas/ppcls/data/postprocess/__init__.py create mode 100644 src/PaddleClas/ppcls/data/postprocess/__pycache__/__init__.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/data/postprocess/__pycache__/topk.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/postprocess/topk.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/__init__.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/batch_ops/__init__.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/batch_ops/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/batch_ops/__pycache__/batch_operators.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/batch_ops/batch_operators.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__init__.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/autoaugment.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/cutout.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/fmix.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/functional.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/grid.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/hide_and_seek.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/operators.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/randaugment.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/random_erasing.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/__pycache__/timm_autoaugment.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/autoaugment.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/cutout.py create mode 100644 
src/PaddleClas/ppcls/data/preprocess/ops/fmix.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/functional.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/grid.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/hide_and_seek.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/operators.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/randaugment.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/random_erasing.py create mode 100644 src/PaddleClas/ppcls/data/preprocess/ops/timm_autoaugment.py create mode 100644 src/PaddleClas/ppcls/data/utils/__init__.py create mode 100644 src/PaddleClas/ppcls/data/utils/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/utils/__pycache__/get_image_list.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/data/utils/get_image_list.py create mode 100644 src/PaddleClas/ppcls/engine/__init__.py create mode 100644 src/PaddleClas/ppcls/engine/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/__pycache__/engine.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/engine.py create mode 100644 src/PaddleClas/ppcls/engine/evaluation/__init__.py create mode 100644 src/PaddleClas/ppcls/engine/evaluation/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/evaluation/__pycache__/classification.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/evaluation/__pycache__/retrieval.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/evaluation/classification.py create mode 100644 src/PaddleClas/ppcls/engine/evaluation/retrieval.py create mode 100644 src/PaddleClas/ppcls/engine/train/__init__.py create mode 100644 src/PaddleClas/ppcls/engine/train/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/train/__pycache__/train.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/engine/train/__pycache__/utils.cpython-39.pyc 
create mode 100644 src/PaddleClas/ppcls/engine/train/train.py create mode 100644 src/PaddleClas/ppcls/engine/train/utils.py create mode 100644 src/PaddleClas/ppcls/loss/__init__.py create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/celoss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/centerloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/comfunc.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/deephashloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/distanceloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/distillationloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/dmlloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/emlloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/googlenetloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/msmloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/multilabelloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/npairsloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/pairwisecosface.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/rkdloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/supconloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/trihardloss.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/__pycache__/triplet.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/loss/celoss.py create mode 100644 src/PaddleClas/ppcls/loss/centerloss.py create mode 100644 src/PaddleClas/ppcls/loss/comfunc.py create mode 100644 src/PaddleClas/ppcls/loss/deephashloss.py create mode 100644 src/PaddleClas/ppcls/loss/distanceloss.py create mode 100644 
src/PaddleClas/ppcls/loss/distillationloss.py create mode 100644 src/PaddleClas/ppcls/loss/dmlloss.py create mode 100644 src/PaddleClas/ppcls/loss/emlloss.py create mode 100644 src/PaddleClas/ppcls/loss/googlenetloss.py create mode 100644 src/PaddleClas/ppcls/loss/msmloss.py create mode 100644 src/PaddleClas/ppcls/loss/multilabelloss.py create mode 100644 src/PaddleClas/ppcls/loss/npairsloss.py create mode 100644 src/PaddleClas/ppcls/loss/pairwisecosface.py create mode 100644 src/PaddleClas/ppcls/loss/rkdloss.py create mode 100644 src/PaddleClas/ppcls/loss/supconloss.py create mode 100644 src/PaddleClas/ppcls/loss/trihardloss.py create mode 100644 src/PaddleClas/ppcls/loss/triplet.py create mode 100644 src/PaddleClas/ppcls/metric/__init__.py create mode 100644 src/PaddleClas/ppcls/metric/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/metric/__pycache__/metrics.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/metric/metrics.py create mode 100644 src/PaddleClas/ppcls/optimizer/__init__.py create mode 100644 src/PaddleClas/ppcls/optimizer/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/optimizer/__pycache__/learning_rate.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/optimizer/__pycache__/optimizer.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/optimizer/learning_rate.py create mode 100644 src/PaddleClas/ppcls/optimizer/optimizer.py create mode 100644 src/PaddleClas/ppcls/static/program.py create mode 100644 src/PaddleClas/ppcls/static/run_dali.sh create mode 100644 src/PaddleClas/ppcls/static/save_load.py create mode 100644 src/PaddleClas/ppcls/static/train.py create mode 100644 src/PaddleClas/ppcls/utils/__init__.py create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/__init__.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/check.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/config.cpython-39.pyc create mode 100644 
src/PaddleClas/ppcls/utils/__pycache__/download.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/logger.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/metrics.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/misc.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/model_zoo.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/profiler.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/__pycache__/save_load.cpython-39.pyc create mode 100644 src/PaddleClas/ppcls/utils/check.py create mode 100644 src/PaddleClas/ppcls/utils/config.py create mode 100644 src/PaddleClas/ppcls/utils/download.py create mode 100644 src/PaddleClas/ppcls/utils/ema.py create mode 100644 src/PaddleClas/ppcls/utils/feature_maps_visualization/fm_vis.py create mode 100644 src/PaddleClas/ppcls/utils/feature_maps_visualization/resnet.py create mode 100644 src/PaddleClas/ppcls/utils/feature_maps_visualization/utils.py create mode 100644 src/PaddleClas/ppcls/utils/gallery2fc.py create mode 100644 src/PaddleClas/ppcls/utils/imagenet1k_label_list.txt create mode 100644 src/PaddleClas/ppcls/utils/logger.py create mode 100644 src/PaddleClas/ppcls/utils/metrics.py create mode 100644 src/PaddleClas/ppcls/utils/misc.py create mode 100644 src/PaddleClas/ppcls/utils/model_zoo.py create mode 100644 src/PaddleClas/ppcls/utils/pretrained.list create mode 100644 src/PaddleClas/ppcls/utils/profiler.py create mode 100644 src/PaddleClas/ppcls/utils/save_load.py diff --git a/src/PaddleClas/ppcls/__init__.py b/src/PaddleClas/ppcls/__init__.py new file mode 100644 index 0000000..d6cdb6f --- /dev/null +++ b/src/PaddleClas/ppcls/__init__.py @@ -0,0 +1,20 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import optimizer + +from .arch import * +from .optimizer import * +from .data import * +from .utils import * diff --git a/src/PaddleClas/ppcls/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d088393b5e24f73a78a6251f81b278a3368aa7bf GIT binary patch literal 255 zcmYe~<>g`kg8G|=Ntc)y7#@Q-$N(tD-~hzM20$W(A%!uAA(t_VkrBjZ%3%tD(2P); z8A`JNY35)CO_rC8Ky8|gw>a|)N-}dZt5S=A97ZiaP1aisMIf`ISQ3kpGj6e@B$g!J zVl6Gn%qd>UP{arxgQk`=Wi*<`qb5awFQ+18>jByDU6eQ;q$Hd2H j=4F<|$LkeT-r}&y%}*)KNwwn#8e7Z(BzTxO7(oyKwZlDj literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/__init__.py b/src/PaddleClas/ppcls/arch/__init__.py new file mode 100644 index 0000000..2d5e29d --- /dev/null +++ b/src/PaddleClas/ppcls/arch/__init__.py @@ -0,0 +1,134 @@ +#copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +#Licensed under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. +#You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. 
+ +import copy +import importlib + +import paddle.nn as nn +from paddle.jit import to_static +from paddle.static import InputSpec + +from . import backbone, gears +from .backbone import * +from .gears import build_gear +from .utils import * +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils import logger +from ppcls.utils.save_load import load_dygraph_pretrain +from ppcls.arch.slim import prune_model, quantize_model + +__all__ = ["build_model", "RecModel", "DistillationModel"] + + +def build_model(config): + arch_config = copy.deepcopy(config["Arch"]) + model_type = arch_config.pop("name") + mod = importlib.import_module(__name__) + arch = getattr(mod, model_type)(**arch_config) + if isinstance(arch, TheseusLayer): + prune_model(config, arch) + quantize_model(config, arch) + return arch + + +def apply_to_static(config, model): + support_to_static = config['Global'].get('to_static', False) + + if support_to_static: + specs = None + if 'image_shape' in config['Global']: + specs = [InputSpec([None] + config['Global']['image_shape'])] + model = to_static(model, input_spec=specs) + logger.info("Successfully to apply @to_static with specs: {}".format( + specs)) + return model + + +class RecModel(TheseusLayer): + def __init__(self, **config): + super().__init__() + backbone_config = config["Backbone"] + backbone_name = backbone_config.pop("name") + self.backbone = eval(backbone_name)(**backbone_config) + if "BackboneStopLayer" in config: + backbone_stop_layer = config["BackboneStopLayer"]["name"] + self.backbone.stop_after(backbone_stop_layer) + + if "Neck" in config: + self.neck = build_gear(config["Neck"]) + else: + self.neck = None + + if "Head" in config: + self.head = build_gear(config["Head"]) + else: + self.head = None + + def forward(self, x, label=None): + out = dict() + x = self.backbone(x) + out["backbone"] = x + if self.neck is not None: + x = self.neck(x) + out["neck"] = x + out["features"] = x + if self.head is not None: + y = 
self.head(x, label) + out["logits"] = y + return out + + +class DistillationModel(nn.Layer): + def __init__(self, + models=None, + pretrained_list=None, + freeze_params_list=None, + **kargs): + super().__init__() + assert isinstance(models, list) + self.model_list = [] + self.model_name_list = [] + if pretrained_list is not None: + assert len(pretrained_list) == len(models) + + if freeze_params_list is None: + freeze_params_list = [False] * len(models) + assert len(freeze_params_list) == len(models) + for idx, model_config in enumerate(models): + assert len(model_config) == 1 + key = list(model_config.keys())[0] + model_config = model_config[key] + model_name = model_config.pop("name") + model = eval(model_name)(**model_config) + + if freeze_params_list[idx]: + for param in model.parameters(): + param.trainable = False + self.model_list.append(self.add_sublayer(key, model)) + self.model_name_list.append(key) + + if pretrained_list is not None: + for idx, pretrained in enumerate(pretrained_list): + if pretrained is not None: + load_dygraph_pretrain( + self.model_name_list[idx], path=pretrained) + + def forward(self, x, label=None): + result_dict = dict() + for idx, model_name in enumerate(self.model_name_list): + if label is None: + result_dict[model_name] = self.model_list[idx](x) + else: + result_dict[model_name] = self.model_list[idx](x, label) + return result_dict diff --git a/src/PaddleClas/ppcls/arch/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/arch/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8585526a6c042215053c854e1154353b35fadd0e GIT binary patch literal 3525 zcmZ`*TaO$^6|U;r^z>YIcD-wR%|(`l3Kqr{sIg9Im9&#?=Z#I?2AInr}EaKlq z7wyfnTqGpJto`snYB|@xOpheZgM2m~+tE%wOGP4%#wslil0wN+rP&k*w+c0%%49MV zGB@2P^K@Ef$E$hYHLd#mS{?pcj(%kqZoZT0GRt$^cQ&=65nXis_!yAu?xE?A(3R|j zS8(4hJLL;r@e@Zl7&)bT;*~x|ZW&Z=zj=)ic>bRzpLKO4ub6faFylgOGUDUS+iT#SuJ z+80cH1DpPR|L3ZbCjH<;91tMUN(}WJFA{^Eun+f(z$a7`q}M8hXZK 
z?{mN+_Q_mo1B?GBXj*jCb95zJ0yd|NEKJZLkF9{?mTu)Pxdn?uV5wN*3Af_#;fGl5 z?!wF10nZ+K@RUb>f}cr$0H|^D?UHsirh!0YBn{zP?H{bxUunaYHWUn(4u)~~vlS01 z&^3!U*V6~(tgx&TdqX*T9DBczspvOh^m!o_!K{vBMW!{Le2O@s;53oO&LrJ0rBb2Q zO|jp22ejWeo|gH(^=!Pcroc7qI%ficaD3@(YLCo`17CnklVk+k=_EP#y7~^@^b<5q zF~b0%&_XGMr`t{l&j+?Y-dr=@VEqBZ(c0a#^ChDDEd=DVgU(&V3HN1M&Xv@b=d)7x zJ>!XNRGzO<-^aqfuPCYw|J3+-x(A+jXYNRw5GFi$w=n;+ATztYCs*q(xT%w z2AWt5Zn;$B1(TN^($b^Jp=;=gz=97f_pAx}`9T63&0s(+QG|DKd zYtlWW_5jQ-r^2+6dlNn1vxK0?zx{R%h`_!{#~bTDNzO4gSC|z|8c6u z+PIncG;Y^dtkb`7ai8@U(5=ntlTv^CM*R0KbrL zgM7Nk>^=J<*lj^V(A(x+CxSHM4v)S%zV=EPcE(dL(Nuu*$sza9G@-kusS*j`uy*g8 zaId$h1DX*@9l8}#1O+CA(jE4scP^tk?MzCv#D_?~ui;$vHg!Lx?k#lvz#3Y;P6GPM zxCOeJ=VfAP>OB&FK%F(pxn+nYK10)lN+O{xYn99Vc=y#WUUQH;9HNjOsyDIa!hz?U zLZGKQNK^F?I>@V>ec8QX$)NvrwMF}O(3xgo%eBFDY8*7g7gcYA!>qK`hv}>Yb-ixO z62ofJB}>|x)23iivx>ir>xTH!G#W+=d1H04KIPuN}$v~%1 zWkQ9#jbenYU2Tunc{VvOR81pEQp>Gfx)0SQmOqVebF}^93nn@ cViUCQASog(_TU4GK|4c=4BfC5c6vAd12b7z8vpmIFdjK(ufpLP}3os?~aDoUA>&>&%SP zBwCAugQ$==aD-EG>`y@Ak1$tGIdkL0H|rQBP{gd}`MozYp7*}@y-8MATLj~q-##4t z+92e2XIvdVjL$IjHUuY}O448a(msXFB+I?NM+x`1Fa12|2haoFI3@iC4|#;&kT>@T zZ}F9(&)aP!=L}JD0$M9>H3DZfW?3BY!UUDHjiAwh~BT_R}Lf2cmP35q#Q5^FuHF0bMY{<23WjZUgNf5ma z%<)8M+x~J|q$Vqi=P<3Ovm?8b%0%mUvEE1~6H)N4XM-Ziae$63J$y~n8gBUW*%zPe zew}bG#Z#Plq=XcS7LOkHp8S`4GD)T0busTwO(u13a%^uQr}$bL?>M;XYWqH2$JeH6 z9ln2I%iyfBeF%k9)R^RSMo?GEJ?KD}l8TJHijKTHWX386Ed6bDS+#e0rV)k|JU#?C zWf2>d6na>yTqsm^{S98ff4k12=R)o+%dT!C0`(Sz4dWOUisP<##n~K45Jcj-8oGtJ z<+k5*>HobN*Hw+PB1WN1me%(B*ry{10wmn0?zod=@8_S~_~p|J2UhB;z8p;`A*N%X z{vHC%^9^}L<`lg99J~u=jjC}*IasDT2W`VRgYBIIFDK{TnSV}ZVHH+v?5lR=p9NJ= zfuA)iVUHTFlTPa%`hijM9U0QPkD|)w%!HL^pv~!=eakk$_BowJRRsK-mt4+@p-@Wj zj?VH-Ch84bwk(dfQ&+9!aJnn|4rYx>g<@~G7Gc}UWm`jw+R>wbA{}kYBp>i(>j}2~ zfk{ptTwZ;hf4xmvris*BP!=bicbWCZ;@E~t915kywU}HyuFV|>^j(NOfcRo6sh z1%(qMYzstEM!(F=zH))uK$b6svMUGEEFH(#la;FN7JOYQW4$~ptS3c*zwuqu*U|MG zI4<)UanqicGtSvH4Z_uuhAA| z^e&B{t<(EdJ%%3-n&Y=}9biMA4Tmxt)VU}JcKzDb??c@d^^q&3cQ4iA{%8};HI-uP SCS2O39O0riYkSd36#fNTc$d!r literal 0 
HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/__init__.py b/src/PaddleClas/ppcls/arch/backbone/__init__.py new file mode 100644 index 0000000..1bd23a9 --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/__init__.py @@ -0,0 +1,83 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +import inspect + +from ppcls.arch.backbone.legendary_models.mobilenet_v1 import MobileNetV1_x0_25, MobileNetV1_x0_5, MobileNetV1_x0_75, MobileNetV1 +from ppcls.arch.backbone.legendary_models.mobilenet_v3 import MobileNetV3_small_x0_35, MobileNetV3_small_x0_5, MobileNetV3_small_x0_75, MobileNetV3_small_x1_0, MobileNetV3_small_x1_25, MobileNetV3_large_x0_35, MobileNetV3_large_x0_5, MobileNetV3_large_x0_75, MobileNetV3_large_x1_0, MobileNetV3_large_x1_25 +from ppcls.arch.backbone.legendary_models.resnet import ResNet18, ResNet18_vd, ResNet34, ResNet34_vd, ResNet50, ResNet50_vd, ResNet101, ResNet101_vd, ResNet152, ResNet152_vd, ResNet200_vd +from ppcls.arch.backbone.legendary_models.vgg import VGG11, VGG13, VGG16, VGG19 +from ppcls.arch.backbone.legendary_models.inception_v3 import InceptionV3 +from ppcls.arch.backbone.legendary_models.hrnet import HRNet_W18_C, HRNet_W30_C, HRNet_W32_C, HRNet_W40_C, HRNet_W44_C, HRNet_W48_C, HRNet_W60_C, HRNet_W64_C, SE_HRNet_W64_C +from ppcls.arch.backbone.legendary_models.pp_lcnet import PPLCNet_x0_25, PPLCNet_x0_35, PPLCNet_x0_5, PPLCNet_x0_75, PPLCNet_x1_0, 
PPLCNet_x1_5, PPLCNet_x2_0, PPLCNet_x2_5 +from ppcls.arch.backbone.legendary_models.esnet import ESNet_x0_25, ESNet_x0_5, ESNet_x0_75, ESNet_x1_0 + +from ppcls.arch.backbone.model_zoo.resnet_vc import ResNet50_vc +from ppcls.arch.backbone.model_zoo.resnext import ResNeXt50_32x4d, ResNeXt50_64x4d, ResNeXt101_32x4d, ResNeXt101_64x4d, ResNeXt152_32x4d, ResNeXt152_64x4d +from ppcls.arch.backbone.model_zoo.resnext_vd import ResNeXt50_vd_32x4d, ResNeXt50_vd_64x4d, ResNeXt101_vd_32x4d, ResNeXt101_vd_64x4d, ResNeXt152_vd_32x4d, ResNeXt152_vd_64x4d +from ppcls.arch.backbone.model_zoo.res2net import Res2Net50_26w_4s, Res2Net50_14w_8s +from ppcls.arch.backbone.model_zoo.res2net_vd import Res2Net50_vd_26w_4s, Res2Net101_vd_26w_4s, Res2Net200_vd_26w_4s +from ppcls.arch.backbone.model_zoo.se_resnet_vd import SE_ResNet18_vd, SE_ResNet34_vd, SE_ResNet50_vd +from ppcls.arch.backbone.model_zoo.se_resnext_vd import SE_ResNeXt50_vd_32x4d, SE_ResNeXt50_vd_32x4d, SENet154_vd +from ppcls.arch.backbone.model_zoo.se_resnext import SE_ResNeXt50_32x4d, SE_ResNeXt101_32x4d, SE_ResNeXt152_64x4d +from ppcls.arch.backbone.model_zoo.dpn import DPN68, DPN92, DPN98, DPN107, DPN131 +from ppcls.arch.backbone.model_zoo.densenet import DenseNet121, DenseNet161, DenseNet169, DenseNet201, DenseNet264 +from ppcls.arch.backbone.model_zoo.efficientnet import EfficientNetB0, EfficientNetB1, EfficientNetB2, EfficientNetB3, EfficientNetB4, EfficientNetB5, EfficientNetB6, EfficientNetB7, EfficientNetB0_small +from ppcls.arch.backbone.model_zoo.resnest import ResNeSt50_fast_1s1x64d, ResNeSt50, ResNeSt101 +from ppcls.arch.backbone.model_zoo.googlenet import GoogLeNet +from ppcls.arch.backbone.model_zoo.mobilenet_v2 import MobileNetV2_x0_25, MobileNetV2_x0_5, MobileNetV2_x0_75, MobileNetV2, MobileNetV2_x1_5, MobileNetV2_x2_0 +from ppcls.arch.backbone.model_zoo.shufflenet_v2 import ShuffleNetV2_x0_25, ShuffleNetV2_x0_33, ShuffleNetV2_x0_5, ShuffleNetV2_x1_0, ShuffleNetV2_x1_5, ShuffleNetV2_x2_0, ShuffleNetV2_swish 
+from ppcls.arch.backbone.model_zoo.ghostnet import GhostNet_x0_5, GhostNet_x1_0, GhostNet_x1_3 +from ppcls.arch.backbone.model_zoo.alexnet import AlexNet +from ppcls.arch.backbone.model_zoo.inception_v4 import InceptionV4 +from ppcls.arch.backbone.model_zoo.xception import Xception41, Xception65, Xception71 +from ppcls.arch.backbone.model_zoo.xception_deeplab import Xception41_deeplab, Xception65_deeplab +from ppcls.arch.backbone.model_zoo.resnext101_wsl import ResNeXt101_32x8d_wsl, ResNeXt101_32x16d_wsl, ResNeXt101_32x32d_wsl, ResNeXt101_32x48d_wsl +from ppcls.arch.backbone.model_zoo.squeezenet import SqueezeNet1_0, SqueezeNet1_1 +from ppcls.arch.backbone.model_zoo.darknet import DarkNet53 +from ppcls.arch.backbone.model_zoo.regnet import RegNetX_200MF, RegNetX_4GF, RegNetX_32GF, RegNetY_200MF, RegNetY_4GF, RegNetY_32GF +from ppcls.arch.backbone.model_zoo.vision_transformer import ViT_small_patch16_224, ViT_base_patch16_224, ViT_base_patch16_384, ViT_base_patch32_384, ViT_large_patch16_224, ViT_large_patch16_384, ViT_large_patch32_384 +from ppcls.arch.backbone.model_zoo.distilled_vision_transformer import DeiT_tiny_patch16_224, DeiT_small_patch16_224, DeiT_base_patch16_224, DeiT_tiny_distilled_patch16_224, DeiT_small_distilled_patch16_224, DeiT_base_distilled_patch16_224, DeiT_base_patch16_384, DeiT_base_distilled_patch16_384 +from ppcls.arch.backbone.model_zoo.swin_transformer import SwinTransformer_tiny_patch4_window7_224, SwinTransformer_small_patch4_window7_224, SwinTransformer_base_patch4_window7_224, SwinTransformer_base_patch4_window12_384, SwinTransformer_large_patch4_window7_224, SwinTransformer_large_patch4_window12_384 +from ppcls.arch.backbone.model_zoo.mixnet import MixNet_S, MixNet_M, MixNet_L +from ppcls.arch.backbone.model_zoo.rexnet import ReXNet_1_0, ReXNet_1_3, ReXNet_1_5, ReXNet_2_0, ReXNet_3_0 +from ppcls.arch.backbone.model_zoo.gvt import pcpvt_small, pcpvt_base, pcpvt_large, alt_gvt_small, alt_gvt_base, alt_gvt_large +from 
ppcls.arch.backbone.model_zoo.levit import LeViT_128S, LeViT_128, LeViT_192, LeViT_256, LeViT_384 +from ppcls.arch.backbone.model_zoo.dla import DLA34, DLA46_c, DLA46x_c, DLA60, DLA60x, DLA60x_c, DLA102, DLA102x, DLA102x2, DLA169 +from ppcls.arch.backbone.model_zoo.rednet import RedNet26, RedNet38, RedNet50, RedNet101, RedNet152 +from ppcls.arch.backbone.model_zoo.tnt import TNT_small +from ppcls.arch.backbone.model_zoo.hardnet import HarDNet68, HarDNet85, HarDNet39_ds, HarDNet68_ds +from ppcls.arch.backbone.model_zoo.cspnet import CSPDarkNet53 +from ppcls.arch.backbone.model_zoo.pvt_v2 import PVT_V2_B0, PVT_V2_B1, PVT_V2_B2_Linear, PVT_V2_B2, PVT_V2_B3, PVT_V2_B4, PVT_V2_B5 +from ppcls.arch.backbone.model_zoo.repvgg import RepVGG_A0, RepVGG_A1, RepVGG_A2, RepVGG_B0, RepVGG_B1, RepVGG_B2, RepVGG_B1g2, RepVGG_B1g4, RepVGG_B2g4, RepVGG_B3g4 +from ppcls.arch.backbone.variant_models.resnet_variant import ResNet50_last_stage_stride1 +from ppcls.arch.backbone.variant_models.vgg_variant import VGG19Sigmoid +from ppcls.arch.backbone.variant_models.pp_lcnet_variant import PPLCNet_x2_5_Tanh + + +# help whl get all the models' api (class type) and components' api (func type) +def get_apis(): + current_func = sys._getframe().f_code.co_name + current_module = sys.modules[__name__] + api = [] + for _, obj in inspect.getmembers(current_module, + inspect.isclass) + inspect.getmembers( + current_module, inspect.isfunction): + api.append(obj.__name__) + api.remove(current_func) + return api + + +__all__ = get_apis() diff --git a/src/PaddleClas/ppcls/arch/backbone/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed103276c7df2e873b3b94ebff5cdbead4a514b3 GIT binary patch literal 8785 zcmb7J349dg72nByY(ju=2?{7emLu-W9t5;jAQ03LBf$u-=(yRPY&LvvcbQp|K*6Kl zRV!Al)}w;;tarVwXVrSuvz}uwYcFf}y`Ib;aL!FEM zo%WvE7oO#EeaK4wzcNyeqQ_cU>2gI}5k zs}*gy43^qcK?YJ6NOS@aF 
z!4BxawXhbChNJNqI0lb}W3dxDrM*F{!!GE; zlZAo((2r4w;s6X_3}QG4gVNS4wx9rk2#9frV*(PGgd}c-t!RRY7Fd{ql=Phs@RS@V26~ai7(<#*onJf7oG~I;%RUio(`wu8E}TQPZwXpGvQ1;3(mr` z;cPqy&cSoxTs#lX!}H;M=`%xo883hf@Itr{FM^9CcYyc`UJMsYd8XKnm%t@bo+ZAD zm%^oZ8C-^!!{w4YP<#!qfGhAyxKio|iLc{Ta1~w+SK~Es4PFb^N`AKZ23`l(;U3r{ z^*LfMUJutxd9L^--T*h?jc_C01UKQ$a5LTlx8SXCtF#|1zJ<5JZFoD}j(5NvcqiP6 zcfnnFH{6Z)z&+CE5b9_aN8l0ZH&1*AAB9Kp zF?b9ghsW^=cmkh)?17vKea5njZX z;3a8WAij?;!^`*zyn?U7tN0qchOfix_y)Wo?JeR5_$IuGZ^2vmHoT4Rz&rRZyen-U z@gCY>m);^+7a_yztFeu=+= zU*WIe*Z2{9ByEetZ}4OISjvmVZ}AiO1b+v=!{5X2@el9^$uALq#6Q8Gq}(e0jDLZ@ z;9udd_&4|){vG~~|A2qsKjEMFDSRq@4io>vf5X4=Kky&?FZ>tx!M@!Jx^^o-4=VJg z2g~S950=xL9;~1@Jy=QaN}0#ITM<>nR1;Gz8I_nCVrq%0m5fGA9WnL9)Dz>2_|M{! z+`ARSeI`o{5v9lF34Fx6YnP|Op13a27ZXO8k?K|T5wETV?50svV2sn!fL)*G_SM9Wucs<`v4fdXF|o1BqHBwx$7X$3Y- z4^ZFKukOheS(jC43TI`?pD|E&?lET=czD8|Dtm%dH>ODUX(RqfJ`waY(UkU3y=vMe zpBd#tW9X#G`Vb?1sH{o3vW6mQze3K*HaV}vtm7zhl&73K>6BSdhjdCWlkL!`yQrmF za9HAspNFIMkr!R zCPI!wrpe|pw2|5MJ*%nGYJgW6xlSr@b^3a8j>`>FKA9bZO1rYXyDQixU&}P-)n-=_ zsd|?>Z=dR^wCmfAxW!wis;PNOus~UsvuL!yvoTuGPiwJpb$@>>95do6O0&{CN>@ke z+9=&OO81Y_1EcidD1E8jTv)!dbyE*cm1EPx^PoRurF7L&M}joZovn&_yDsfQy-6#k zwj+^?B-Ny88?N@VhkhZ7{zf|><*^@GwU)3Xqkrbi692X2^INAVtVRjiCQ|W2n z7SV?-VNWS!)nHNDrxm6BP7BGwlTgw+MtP*iJkL4KSdF`n75OJPQ5AZ2Gt}Li*GNe$y7-@-%17PB|oO( zvvXM@F)I}l!iW^)o0FHXxQCNMW-jjF)U=>zZrOh0$bm+7LCm6p+tc^y#bIVNSUUWJZhQjT?0odF%=nK!tFIX$DRS;W&xN(;7TU5r<< z-jkKqWu;C!CupbK$adZba*vm>(lcw6zFebDgI9;Dwe{GwSvf1xp_gX0KrkcnK-bY>*tvo}Ypdzy59(pN zMhYV&u_frGi%RL1DbiT<1zpiZOeC+Q1aUm;uKh_WS)NIB9DTx>?n zIjq!ptCYSg)o6htFBpzRi{>f5sGnLlQ||G6hzmZ%tyG9EM^?&=MGW4X-F$0U))R|j zA{HTiV)g@^zMS<9q4+?UuZfv|^wXP*pH=i&K#+3nQTDn9%LdDLkvv$Ds+{54r9@r3 z%2HK(s&^`>s==DQIk|Rk*)&(X>!jw*WxL9ERqQO=Q@68XuyU`WxV8pKtKaEjd1o12 zP?au^+b*}ZTXwA;HB$X%2o1ZcUk@iDhFu*_=y4{hF%j7&49l+3C86tfbu4Zrjd04Y zqcAkE&oC__tuXa~W!J^5{%!FvUxn%6BX*T(U}DIiIp_{=Gfnyw)7b|aGn!L~ zT^>rtY(=-r6MciXy3CKM13v9uy?k?bC=wCID$2jaG=xDbbBWgK`z$({3=3;>$P5o` 
z?hA#t^d;iPW?hfPV<}y4O>VbqsMW1fa?iTDgSQObM;Rh*DES=60PLV+gAi<*h$XWeuq&|GH|HGgL6} z82|rid}b8~S2NTw)H3kTx@J9to1uZBkzoP>EE-o`O~ay!noS%wkzo?UWQJyjDGXB? zrZG%sn8Cok2QA|&V<;LmXL1bv`s^|fWH^YU$Jd*&%>7y)@|m+aZsE8}9Wc3JbGWp* z3g)ExJ<1hC1!#E3tVjhZUXt#lp-^j##mesh=uO3am!^i9|t;p%K> z*ui0~`=4T&u*@ac{31v0q3lxSZi@3|BB*Nl9>}`YJZ(0uVT2G z;Tnc(8NNYKsw*)jXYO_EzlUM(ID$-Px?j)!-(CN&@G!$8 z4BsIr)sLnT;VJnj`#;9;IKvYRPZE@>Q!1V^pJKlft7IT#%5uKTiJxY8hT&NbEwO0A zR+4M@y>YnmU3*9~pX1ceGrYj?BEw4z-)DH4pj4Trk>sz+SJ?kmhSwNgXLy6*2MljA zyv6V~!#f1Uzd46OW-LTk6M1-B*$*m*HQ(j9qMPEVII5AJrp)&^$X9H(jS9+qO=cr) zuIncRcD1ha|MlteI+O>$Wn;||R9m{A9XWwCi;uV(rKW7U;#K_0bh_0nR$7$lWo~5- y{WU4g>AzXZEV49{eU36uzTJdP6w#FCTFBB&UUTU2(&Lud<|%8HN&Ht)=l&m*AYL&5 literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/base/__init__.py b/src/PaddleClas/ppcls/arch/backbone/base/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/PaddleClas/ppcls/arch/backbone/base/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/base/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c69b585402ec00de2debf7ea91af755a42ee4009 GIT binary patch literal 176 zcmYe~<>g`kg8G|=Ng(<$h(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o11>)hZ?+F(oA@ z)j21zShpxOCpED+Ro6(*7?*HCL2^!UOkz=TMody-a&}UFUMi4LoEj4!pP83g5+AQu WPVW&i*<@hoxx literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/base/__pycache__/theseus_layer.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/base/__pycache__/theseus_layer.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fd599a589308bc59b658ee327624f28bdd8e73a9 GIT binary patch literal 9546 zcmcIq-ESP%b)WBDE|(O!mSag(+^OQ2+?HNgYTc%Uq{@-Z)TzQcv62)k(bM71EV<(B z%<9gpWNu~=pi~U(p_m&=;?NyYi-tcO^=IbaKvY%J$>gScNc3zRGxBRj0 zo$*#48|{WydtYfbJ_F@S)5^>{!$II@#_e95WX7#tC&|q3hrKAwsyE#raCgzDza1Vn z)l3URq*f5^?fIfZ}w%PkUT#>z?iz*o*NK#ZcNNXfU(t zyPh8=z2vZ?mZqXS%|>tBK$0klnkq-y6YZ%&JbYjIRNZdsnHdiUz7TX)S=DiRVJ~qU 
zkZ8ny(EU`+796J&xN+<_pDLfX-`TwPp6hvme-m|Y2tV-M*ni_n>vx}3Js5O?_?{~| z`}cO;&cSXJ`pCroy=32y{bB3`?x8PQgF~?d3cfauF7HN2yj%X^!OB^_n) zSjbSB#c~4~PfhinIyMq)=e~NR-cokXj!nux(NgoNni{EjRe8`z4X{t!&a7ydKw@z- zjR5LIT)-TeK8OZQOHyv~jmbGP)D!MZft(QcBi~WlFi~QSw2%uTu60s!Q*W{!Iqw?q+-T;s(ac{5wCxw zjI~5d^&>UWQT|4%@2O)wF;YD-k2LY~#NxKfN6L|UV8kCJRVsV>fg=9OGd@y&0R{hw zPE0IjXQfDr9zAbi)L7@5YNQxrpUYHT`qS}IdpGdGBI9&!IAylf)&AXGaS$7%{gabOA~ zRTG7Is8IfIwFe|rDiaHpt_ObD)H1F0W~PHADEDQ#vXd`dQjoQ)L_{a*53b*!)kv-_ zRa4I%kD4>W!UhAP*SZ=+9XE)tBU6k#K|F~O6Hn@riLZ~y)z_e1fz z%=N;oHXL|v;^ThAO_MK|_-$%DGbbF`QeGjsg=ykFNe@%Tx2#(5;SJ0X)5TYes%k)Q z7#e=HI_r>zsvT>1sa(UKc5E39&6w9L#auWuOD;XbN#Y{Ce~LFIJ%x*>)76kyjD%h0 z{sLmbT4i*H#H>3EI|;lJlIlU=c6=uu?yfgksy%-o{0_L`ZP@T-V4>|H-nHRiPq-fU zWWB@P!BpS%TF|A;dy~aIYuw7`z!E={r_nSdTfYh-1yL&gfD+cU6Lp&{mdJS*?JR5x zn76Es&dms0sUs9*gDH{*I+x&Vao-K$o;b8u@rRhKL#nEH$UUG7#M=2EkbzcK6ZJ@o zS5g%!N!=r*MLPPYP&OKLGkE*g8uYapw;r5J^wfk}(hdxB3bdtS2Uw(%O-(Dyja&l4|t9 zxm3p)R^6Y0a6XdK4%h4?vf-BWx`%c`ie$L^hCC)Hn|R=NdfgsW+3umec9K_Xty;~_ zpSNKJHzy^kJlWq~XP@>ijM9dE8+V*?J3Gxy)?s9A?Wilq5m`{BOj$udne5igQl%Dq zy>OWC-VMXnZE_+updZD~hFz446({0c;sLBV)GyQi0{dse;q3LHqX({V`?2i`pE=1> ztQSi*ZAcZmu(4PNIZbqOSx2wCcHi&sLc>Je=BZp`Lvji`q2P&&HYu!T%YGXx&>6}L zPwJmaaw`K7b^JK?z0#3=kFEOLsZs1952zr0M;;FxgJR0U%FvcNk%0CaM4<=WDeY9z z4fVqy$;ruusB}_L1SMkL`Oxh{ZGf7YlkJMA54KUjVE6iiNF+GLnI3tq>_dUZ>;#_Q z1qB1X&g;yJ&50a9QON_@Bmrs8iZ6+9=s%L-g?)_%-HgJAS8lD(EKl9&>4i=Qn+yFQ zzQ)V65D87HHclM_M{Uxh7*wu1ebfQm4L@+=-pIf9*6Ce{890t>S57S3JW-QN%Vevw z>cxI{T*jx&Sf5;*Oi|6#Kt zE~7#EB&!$-_T@7jI%E6i=Li4K#7Q*BH$*f`gI`7o zD9Rz%l?{G?3>kcaLB@y<;)Vyz0LEKSHJJR{VR8X|;{~CfCt*>l^I!{jzpOA z7KWXaTyj&%$!E#EgAC&%1b7k@2sjjOt4>l3D1qZ&{A!SGK!k8u4Y;asP7zyw6ZOUu zlR!Vj!WK%9S0F2pbx5e`UWB%iaS)%0wE7gQN*7Mrml?<7q*%&Mg$|2tVIEmy`sDs0)40pi#nTOQN4ZQJ1Bn31l4`qta zdW6kdqxG3npN5;$VEK2&OXPlS7=GpVP(+jr|i1 z=Xtf!Ju}uAZjW= zJ*Y;)wi<*!A>8gq4}E9f4*)#F781fGh?!Ys-;K%CiSJPfp)YmJE(m`Q*2@=8u1B3g z6eo;iWvej|6*fu`T&~IiK2`y20q0mlkS=fv;IsdVMdB4CfWbIAr>l#I{MX}_oNF5Z 
z0>Qe15Nz=ZB?KahA5t<6cc(FLB|rFIqC<`aR}JxJC;_h_tIj-tj$-`Jigxe&P)KA` z1swvn3Q`N=`;@$fq`)v(pU_Nlo^MlbN&=x3;`fmJ9bT!kmMm>$85USIjxC*F37MPI zSySo7QEoS1I(5H9hajUbu2MouEYr!SA?Xohi3OSD%w$C_NWt-@GJ{Nv_%4-+I6=y^ zL^T7nOgbUI1SiDA7y#&%Y7Nr%MeX9UwM;7ai~7}--?;E4Bg8YYe<8y+X)qnTRGpAy z^=j#E0n0=i%OgbT66L=}2?MQjZ2SQl{vEHpWvZa^Hq6cq{1%QE`%;Uneh^Z?`#XtB z5L+gyUZ6mD4&5*~C70lo=)#7wmv_zpR$K!vp&n?W=4o&>YJfRI^a+bmgr@P>*MVE; z;?E~zQO{WO7Ito8{1%VjVthe&{|$y<#xl7MTChsY#JoA>|1`Z!;4i3y(%NG!z;B_B}2m+dy?zC+2JY`=%P|G*m)B&sYkk_m@KBV9>|ChJOa z+l}KYx$T;45i|}Lhhh)8AcJshhG-%@qyQ>-`66yY>fo0^4ZNi9&&NGp^QbjMUF@qfuY=&ayd zhKbgiS5GbMNY1n*TTov9q;g`;N=l$1KtgzGfDu#6JM%H`AJl@bzeIEbE>*_rgAMlI zR@?}BjYCLw-yo(0|)Wfe$X_(1b1s ztuEm?3GMNQV!DX{)96k{!t=#R=tydS7f zT2n!O{?4A=y2HmTz7Fj*T7pYmNHzl1*Ab{9{_M$5el7bw`4OeF?{^M3@JE;<4qMy& zG46Bqu6P@he6H?{UfIUI=|;nXgB;3wnDHEDlo-Ag)gr=K8|iD!*670gIH3H@KMZ{Z z9XLpq42}2sTUsNsn_{La36aVt^Qf5ItLQ4uVO42c9Ka`wR z>7tT#$}EXVOMi8d|Bt0LOnN~q`E!?gQ*b8#7lB+q(Wc}cC1eG8#nP_Wz>*J}Rj len(stages_pattern) or min( + return_stages) < 0: + msg = f"The 'return_stages' set error. Illegal value(s) have been ignored. The stages' pattern list is {stages_pattern}." + logger.warning(msg) + return_stages = [ + val for val in return_stages + if val >= 0 and val < len(stages_pattern) + ] + return_patterns = [stages_pattern[i] for i in return_stages] + + if return_patterns: + self.update_res(return_patterns) + + def replace_sub(self, *args, **kwargs) -> None: + msg = "The function 'replace_sub()' is deprecated, please use 'upgrade_sublayer()' instead." + logger.error(DeprecationWarning(msg)) + raise DeprecationWarning(msg) + + def upgrade_sublayer(self, + layer_name_pattern: Union[str, List[str]], + handle_func: Callable[[nn.Layer, str], nn.Layer] + ) -> Dict[str, nn.Layer]: + """use 'handle_func' to modify the sub-layer(s) specified by 'layer_name_pattern'. + + Args: + layer_name_pattern (Union[str, List[str]]): The name of layer to be modified by 'handle_func'. 
+ handle_func (Callable[[nn.Layer, str], nn.Layer]): The function to modify target layer specified by 'layer_name_pattern'. The formal params are the layer(nn.Layer) and pattern(str) that is (a member of) layer_name_pattern (when layer_name_pattern is List type). And the return is the layer processed. + + Returns: + Dict[str, nn.Layer]: The key is the pattern and corresponding value is the result returned by 'handle_func()'. + + Examples: + + from paddle import nn + import paddleclas + + def rep_func(layer: nn.Layer, pattern: str): + new_layer = nn.Conv2D( + in_channels=layer._in_channels, + out_channels=layer._out_channels, + kernel_size=5, + padding=2 + ) + return new_layer + + net = paddleclas.MobileNetV1() + res = net.replace_sub(layer_name_pattern=["blocks[11].depthwise_conv.conv", "blocks[12].depthwise_conv.conv"], handle_func=rep_func) + print(res) + # {'blocks[11].depthwise_conv.conv': the corresponding new_layer, 'blocks[12].depthwise_conv.conv': the corresponding new_layer} + """ + + if not isinstance(layer_name_pattern, list): + layer_name_pattern = [layer_name_pattern] + + hit_layer_pattern_list = [] + for pattern in layer_name_pattern: + # parse pattern to find target layer and its parent + layer_list = parse_pattern_str(pattern=pattern, parent_layer=self) + if not layer_list: + continue + sub_layer_parent = layer_list[-2]["layer"] if len( + layer_list) > 1 else self + + sub_layer = layer_list[-1]["layer"] + sub_layer_name = layer_list[-1]["name"] + sub_layer_index = layer_list[-1]["index"] + + new_sub_layer = handle_func(sub_layer, pattern) + + if sub_layer_index: + getattr(sub_layer_parent, + sub_layer_name)[sub_layer_index] = new_sub_layer + else: + setattr(sub_layer_parent, sub_layer_name, new_sub_layer) + + hit_layer_pattern_list.append(pattern) + return hit_layer_pattern_list + + def stop_after(self, stop_layer_name: str) -> bool: + """stop forward and backward after 'stop_layer_name'. 
+ + Args: + stop_layer_name (str): The name of layer that stop forward and backward after this layer. + + Returns: + bool: 'True' if successful, 'False' otherwise. + """ + + layer_list = parse_pattern_str(stop_layer_name, self) + if not layer_list: + return False + + parent_layer = self + for layer_dict in layer_list: + name, index = layer_dict["name"], layer_dict["index"] + if not set_identity(parent_layer, name, index): + msg = f"Failed to set the layers that after stop_layer_name('{stop_layer_name}') to IdentityLayer. The error layer's name is '{name}'." + logger.warning(msg) + return False + parent_layer = layer_dict["layer"] + + return True + + def update_res( + self, + return_patterns: Union[str, List[str]]) -> Dict[str, nn.Layer]: + """update the result(s) to be returned. + + Args: + return_patterns (Union[str, List[str]]): The name of layer to return output. + + Returns: + Dict[str, nn.Layer]: The pattern(str) and corresponding layer(nn.Layer) that have been set successfully. + """ + + # clear res_dict that could have been set + self.res_dict = {} + + class Handler(object): + def __init__(self, res_dict): + # res_dict is a reference + self.res_dict = res_dict + + def __call__(self, layer, pattern): + layer.res_dict = self.res_dict + layer.res_name = pattern + if hasattr(layer, "hook_remove_helper"): + layer.hook_remove_helper.remove() + layer.hook_remove_helper = layer.register_forward_post_hook( + save_sub_res_hook) + return layer + + handle_func = Handler(self.res_dict) + + hit_layer_pattern_list = self.upgrade_sublayer( + return_patterns, handle_func=handle_func) + + if hasattr(self, "hook_remove_helper"): + self.hook_remove_helper.remove() + self.hook_remove_helper = self.register_forward_post_hook( + self._return_dict_hook) + + return hit_layer_pattern_list + + +def save_sub_res_hook(layer, input, output): + layer.res_dict[layer.res_name] = output + + +def set_identity(parent_layer: nn.Layer, + layer_name: str, + layer_index: str=None) -> bool: + 
"""set the layer specified by layer_name and layer_index to Indentity. + + Args: + parent_layer (nn.Layer): The parent layer of target layer specified by layer_name and layer_index. + layer_name (str): The name of target layer to be set to Indentity. + layer_index (str, optional): The index of target layer to be set to Indentity in parent_layer. Defaults to None. + + Returns: + bool: True if successfully, False otherwise. + """ + + stop_after = False + for sub_layer_name in parent_layer._sub_layers: + if stop_after: + parent_layer._sub_layers[sub_layer_name] = Identity() + continue + if sub_layer_name == layer_name: + stop_after = True + + if layer_index and stop_after: + stop_after = False + for sub_layer_index in parent_layer._sub_layers[ + layer_name]._sub_layers: + if stop_after: + parent_layer._sub_layers[layer_name][ + sub_layer_index] = Identity() + continue + if layer_index == sub_layer_index: + stop_after = True + + return stop_after + + +def parse_pattern_str(pattern: str, parent_layer: nn.Layer) -> Union[ + None, List[Dict[str, Union[nn.Layer, str, None]]]]: + """parse the string type pattern. + + Args: + pattern (str): The pattern to discribe layer. + parent_layer (nn.Layer): The root layer relative to the pattern. + + Returns: + Union[None, List[Dict[str, Union[nn.Layer, str, None]]]]: None if failed. If successfully, the members are layers parsed in order: + [ + {"layer": first layer, "name": first layer's name parsed, "index": first layer's index parsed if exist}, + {"layer": second layer, "name": second layer's name parsed, "index": second layer's index parsed if exist}, + ... + ] + """ + + pattern_list = pattern.split(".") + if not pattern_list: + msg = f"The pattern('{pattern}') is illegal. Please check and retry." 
+ logger.warning(msg) + return None + + layer_list = [] + while len(pattern_list) > 0: + if '[' in pattern_list[0]: + target_layer_name = pattern_list[0].split('[')[0] + target_layer_index = pattern_list[0].split('[')[1].split(']')[0] + else: + target_layer_name = pattern_list[0] + target_layer_index = None + + target_layer = getattr(parent_layer, target_layer_name, None) + + if target_layer is None: + msg = f"Not found layer named('{target_layer_name}') specifed in pattern('{pattern}')." + logger.warning(msg) + return None + + if target_layer_index and target_layer: + if int(target_layer_index) < 0 or int(target_layer_index) >= len( + target_layer): + msg = f"Not found layer by index('{target_layer_index}') specifed in pattern('{pattern}'). The index should < {len(target_layer)} and > 0." + logger.warning(msg) + return None + + target_layer = target_layer[target_layer_index] + + layer_list.append({ + "layer": target_layer, + "name": target_layer_name, + "index": target_layer_index + }) + + pattern_list = pattern_list[1:] + parent_layer = target_layer + return layer_list diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__init__.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__init__.py new file mode 100644 index 0000000..1f837da --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__init__.py @@ -0,0 +1,6 @@ +from .resnet import ResNet18, ResNet34, ResNet50, ResNet101, ResNet152, ResNet18_vd, ResNet34_vd, ResNet50_vd, ResNet101_vd, ResNet152_vd +from .hrnet import HRNet_W18_C, HRNet_W30_C, HRNet_W32_C, HRNet_W40_C, HRNet_W44_C, HRNet_W48_C, HRNet_W64_C +from .mobilenet_v1 import MobileNetV1_x0_25, MobileNetV1_x0_5, MobileNetV1_x0_75, MobileNetV1 +from .mobilenet_v3 import MobileNetV3_small_x0_35, MobileNetV3_small_x0_5, MobileNetV3_small_x0_75, MobileNetV3_small_x1_0, MobileNetV3_small_x1_25, MobileNetV3_large_x0_35, MobileNetV3_large_x0_5, MobileNetV3_large_x0_75, MobileNetV3_large_x1_0, MobileNetV3_large_x1_25 +from 
.inception_v3 import InceptionV3 +from .vgg import VGG11, VGG13, VGG16, VGG19 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..118b131a8113ddd857168cf6dd7f6f78e3da2dc9 GIT binary patch literal 1212 zcmZ{j+iuf95QgoX4^Er(ko0^^4@PQb$8jkLA*2cc7gVT7MH)p~Iq@ou*h!*wi0Bp1 z!wc{Pyac}GidW!@owP|!n_9}Z^Zzrm6R)*ip^($m^X-@2{1MZ%-z1`s^0Zg#7yMN4 zHD4DR>Y!r`Vi<=wCLn=HNMZ_7n1(cFAcI-RVh(aT)yG604KT0(1)PByoP}8|LJ><) z!a0~5)x^a-F2DjV!XlQTj1{Pi{DfG-Wmv`)Six0T#Wh&Nby&v@*uYKL#4XsuZP>;g z*csI)#V%H%I*Lh)+Q zy1|Lcn2E6TzSryst_ED%?5o9UZdkl$xetn-x?w(2qz{aW;wZ8o}HUd!j$Aemk%V}5*a5X2uVVUkS1gZSpwZPnOA`1m83?4i~?bXFiR*B zN`yIu(7@3LRn=^0%X#t_2#bWWg1TLI6^^WszeHFjtPoZSYlL;e24R!1r2s}}{LXCX z$ZchX@nO3ycc@~QP$ldseAlE&G2yTG`g!An=lg=62`@O3TyQVoN3~<;F}vSyiJ;-h z)>WhFwLUj{UEUD9&AYxQuNn4yuF{Nkx}5=I$NlRt^FoY6qRNg z`_0SQgYVqWe&_prmviV34;KtP-~Hv;x#6>h@!ymge6q;Ag*OqVVF*K*ki(M0n|?MU-z z?I`jEF(is&Sd4sN*N%zNh9OFi&CskJ4^9NfHePsSteds6*eAvw7_~2mFNtw+;DK3t z@ohsK6o(!d;!tpSJyUxL`NLuY`3cUyjQkOC6#1i^|043o#Bt=0bN&_NPl!?EM>&5| zydcV$;Z^Zv@uGMMGgQPW@v`_Ls4t1r;uUcc)R#q7yecZ7P6=zqsGQj({#P<;)SpXQ zQ6~+&u-R_ORJkHt43n@GtC6-0RoQB{I%&mH`z{H; zora6SrNzdLRx6?o@A}J{te9%}n|{~~;|BHjBNXJWPhJh`{tC$At%V>7I>~i^C6E~F zK-BVuCsrELZ!dUl8DMo`tWNA%?9Izo)9c7+_3lENwv!8I&a{0YqTqDc^cz7Oq^FxL z5k$%9*lz~axx3Z5dN9{Ys`Xa$OmB`eQGk}hmn)ubd*<5AbdY+>uX&T_wiXbGYFo64 zdC6+{SLU%Rtf=#`$Gyi>Ob8@(roUwW_LO>QIU8h4?fg!O%E!nkd(S!5invqOm2Gm13@CYoo5`d2QFc3!%}PcdqY8%-T6QI;Yn_H{b=rw4%n=a1 zBwP(tG43?I`hp+FkZ|QJ1Yu(V*j)-mx=o=sVXg+`hpuxf1!w zX&FR;p9H5TtFJ$=xZSQt$!%ZO7jDn_^?P%zIJmtVHn)QWc&oNo)M#&NFIni!&qu*2 zOq!IC80JCKH7#@GsbiJQaZ|pEvP~CBo!HTThdnmEDZMR*f6?4EZK)>3M_7urZlwc^o4({`2Sb{Le*fPNwNEs|++G z#L-+-Io6vksq#=&-l89M0+o$}C2x_rrjwHxLya{3dx59b>|7Ll6=g{giGh!^%@bx3 zzw$NY>r^=S(3jDZ&^`o4%@jt>61g85^QOoM`$4{z5n18D8rVNHY@?RN%3U=?O8D|L 
zYe;zlJ#W=XH2M$GWS)tINrWUd){PDGkztbDnviCa(~K|=!w7>_*))^d>oC9%O<_Zx z-LBzfQwMC#q&eZNnFo%(%+l5>=hF!0*tTcDnpD(`u9 zpe(_>wCR#0NWOvOExgIgNQ_(onCSlzjrTWhfl7*I@nhF^%+>J$8L0NZWQU?7)j}e` zs3Bl=jSb-9k+}wCwE>i9>aHbUMjqJUyg7qKPFHOCCVI$=l)Oa=jq=2jSBWO|v>OAt z`IcM)GyV#72|I^ab-{G-uI?XPYp)#?8%eGf-zN_32AO2fQw5LgZYLtT=y~@$exz%d zOY$4&rG|S<-xV749VBEF_7?RGXuqEL_r=}Vz#dRpn!O8+ zM|_4o$F4w=S<@9u8(VV)X7>ihmVZIX3?;WH`8E>eXctqt%}$ht?P#UP4&B#<7K_s` zUD<^g7UUHDdzR2+CZ!PPUpxOzt8XlyTxDt%tXE^hJnJ?va}UX$*lD(FF+LnvtA6eF2sxJcoHpOAKAzZnO8g60(bN-X&pI+D ztk){;03=8?U^kb%v))-{3;Z$*l|_*d7f(LXl0Go#9`gOS7$+eo$G~2aG_ySa-py{& zRPvFVD_C2y|NGT_eJ1ZM=UEI#FUxrs1RNj8>0R)7Lfi2Hx&fYoo1J<46p!_{7tKMZhUBd`f`N7BhSBc|LCLV2JP1U>Y~YgOW;UO*Uu>s3W-@W2rKye$0 zuG6F8Xzd2dC-(+0i5IO?#Cu+#IbGLmtigS2N8kn$>*T|!oKsG6fDPat@h+;^#iMo#j z8+MgwcJ~&D3MtvPJzH4(Im&xj)CNqa8jV|6oUHzzSR^a*mvWyIzyFMapgh_e z$*MVsQfN-YBZR<*>ypQ2<1D~@Y_7Rv=hCdec8wzt1%>VEQI4iWuHme~e-myuFY?gb zE*(2E#&W64xx6T96gG^tLR#p$>*iJC&Kz|4&{{Da>J~YFV{N!QEQaq{@@HwWo9_;Z z{2^n)V5&fr7=d7n#K+R%?Xqo3*LDjVBi)flrfJ-N|Nh^kqjyW)!uu4yGjCfUd*&iH-U^4o-12${OonI=XR){nNwgMAsJk#rQ+w))CBb z6fG@rfb+-FX8&545#`Jfoq=Fl`;D$+EYIV75Tif8jMm5~#* z`5}_<7ucqdK&I6a3qD2pk`xk#*pA4^9p)1lxOiJyY4a0|Des|bsrOj2K_9@#;aPtsPKNmPvD2(u$&uZ8Cijz>#hv zU^KCLQ;y$moDy`WzwGh!Cu+OgyJv703~&oOPZ;y-fy z;Nq9elGO`zK3*N+!_2)6hlO>J79wu*s-WVv;n)$#|DouH4M*QRI9rC|S(9OGb=X3R<>)09yV%B^>#1H1rRYWg&P-)IU=4Pn5hv$sUGc0su`8qLMu0Xa81*H5>XN zk0T`wlu$w_mmMV2)6e(iNX7pWFUM4FhJ$_z{7vJxBNyvrRQeCm(D|CFS3z{4!f~cp zh`$QT0`($0*s!wXNSj7{f=(GzxWDqxQiP9PoG+pjA-b^F&F|slj@{y^TR4MqJ4xUr zNhDU^>0ej`qMWX@gL0UZ<5pVs7yU5uaadR_-{8~MGP;y!>Ch`~%$8fSJUfT5Jn-XL z?pK9tOwO(<;3_AEM91|&Wzbyir_$oh4$#{Su1VRFIKC`wPph(gi-A*2y7-{G5&6&P zvTJu#$>R%^?S3a|tH#ihPBAUKWX(yct*7$eQ98Bbu&K|^Z=xotB1w&TFf@e+3hR;i z0M_!S=KJKhK+ktGDSTVl7IWP;kt4?WOPH@v#Q#0BwZ-C<~7~Bio(t^Yq!Vf@BZKltL4932xvNNG`6@bpqj!6DiRlQgUjTh z%1aBuVCft5NqYR7Y&-@S!W}M@PbSToO?JEY$WpEsAQKuprOA)LPkc$DA=a^6e zT)g-yH4uNQc%A!Wr0c)5bjb~Dwj>iEA5rpCN_I&G)n0v$WKi9wEE&&gp7WoJ#-}72 
z=l>|#_;64*4v|DWS2n2j)^lWo>ON)J*sV2i@%*__qJO~nQ?&*d`3JRzcKXQfXpKRU z-t(n`YQ=M;g6a_ERq%rY{i&%P?`gVjn(rVW!*!D+Pg|0!EyTAcbUfsa`rM`cBg>T!d zblYC--?oE**8+cn{#C|{=znD-Icw^GZrp)$kyI0ZF<`%ry+n2(gQ^L)NadzGqRql6FR-Xn_6Lv`rL`$66xTWGDu8LzxO+iIMtlW3>hufsvXW!{_g z{mBI+&i=`8#mrCbL)9!GgM1TJasxUw- literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/hrnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/hrnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1ee9ed8269761aa786cd59a11bbb22c1dc58b64 GIT binary patch literal 21144 zcmeHPTW}lKdEQ+t78imfL{btZ%Q7t6lHo{{M9G&ZzUX4nm&mj%$#IIvfk2!k2?_+M zvrEZhz$6Xb+UbO8(k5w|x+#@rI@Pq(X42cVP1Dr9J*RKkhxVbJY1OAT)0sB0JnHxT ziz`LYNi6A!i`j#7KfC9A|MeW8(A}LhaDD0r`^P>xX&C>?MC%nt;xKOSe@w$rhBC{> zw3#za#x2CHoF(xH;*nfL;&$22*(kTm(OeYY(Q=T-KA2hTlJ`3l~x;6-&?U?0hu8yZRP9&8i0xD_ssrjE zV!PBy^{{#bu?N*l>QVI=Vp(-ceNY`j>>+hpeMmiy*ay@Z^@Ms7vEAxr^^`h{*dFx} zbwnLSY)HMLj;W^++p9jRj;j&G_Nkv!&!}e++plu!IrThZ!^%EwWIuL|c*vSTZ(+=v zDbM+CzBD~MqkUw>Rq0~ME6r38@0r!5ik}~!s}z0Yj3Udbupn`=pbOJSd|$K0+^koa zo-MnG$6l_K#%J_&)(TQ*CSA{+^G*~lyE=$Jr`(EP@-L%gV|k{a^6K)0F3e8mXSM6= zLa7pLUo|^Fu4kt6bGke~HR=1a-l4sFXA4S|-94r0!h~CK{XNq&$}M|)DuroxXl!a| ztmux-ctgdR>Akfv_Lg1LR0VxGAJ*Ob?5R=L&%d(&vHY>d1Gs8vR?YIoyxTF>fqi#y ztl>L1*1n zKXmq_Fpgv8g0}~c;1)c0&+yQJPT8}w#j1=^>`F#*c2a`_4b5H- zy7EQs7Qjg{<^c@o^&v3Kc;n+(mKndHGsrEnsP*D(^YV5gaE%;hEoZ6N8^*Y)BFcU} zk&CFPav)&rH?TChDCRUCbRCag}j3KCBj3;nMaC^@q@Qp=TMAPuC zsYunVTGdF^u11tOh(+CMRHGblDONS|HfltxmV&fjvXFNCSk+lH5sS;BI-`0Adc0;K z7$H(im3(osP^o|!2dSAkznSVf?_!?Hd9O6@29D?Jl5&F>`FN=^5jYci1~Mc_jFk#r zz5uDBx1rB$B8UP66dw6}sZ#Ru`49vS-+TD!z%JrDNE|Kr#mP~ud0>rIWPbJCY#s}2 z&n=G!smk1R%cC$gUMl;p_JXc3BG09Fp^?rqU~|AHk)#{Wod@$PhaFr)i1`mDnevC5UCo=M%BDxE?|b2%|^_s#mv)q zq|t0d523E!$3WiF()$q)%-6zaY&>WX0~uTbwVXk*N1e|wlk%DGZ+*C0Ep*I>2m`|C zs>5lgOwH$mWS)X^uFQBUpT96yD2F979r}Z)A9UAdtLT=?^%;|=d6@mm+}(@VQ@A}+ zZ9Hn2`24tMKr_EHO+1Lw22I45zFdvwP4%TNr8=Z}_Wis$8(Mv4x4 
zsb7qfGK4C=hDehtKn+ITs)BvxBfd@gP^Pj}WXVQO6gd`h?5d5t=n`u=wLE74OcJyb z)&%`bMRcXsLwi{qWtRS&pFmG22}2cA@gOX%e6EsMVgg@UhS$m(d+V>95~yfKY%*2qJc9BO7z?ysb(%! zLhVjgDTtEh_J?1?;n#s6!OT3X2C>t}Lxs=sT%lIIg`E;n;NgU7^AYq2lk%L;FeX50 z09YR6Eb_f)5g1O=a!m5j+UI&a+K8LhJ8_38ho#r!j(oSJ(AKoxjob5sE8x+f&e0<0 zv3_yR8Zz+XNL0-QEYF%G$|WC09k5f7F=|gm^--jqkc9o%R2=X!mAPaIktbS6+Zw%U z>BlfmU|!B9HD6gj$AC;*%k+GhF`2R#5DT184Tg|YO6K>f9nP=x64$PmSEZsKFji8a z*cA*ysP-4pleY3K*qw==QtL}`QA9U*p5p9nx(McV{mEJz+Lc%`CH`AD%YL(ze(Tnyhr;Ka~M0{2Dq zg`^E}M?$udN8%>A!_bvHm<5)?gufZHQ~tAQAdB54VH`&wSFj$Cu3Gh#o2KEA}>_{wp! zXNzXeO=#t$@M=@n1gikg>v1S+jv`uH1yf9OmD^D2AftiOZz9n|sc$VqCA&gpi$aK^ z2lH2r+yytt0)&qrTaIIHtpRA+cNq&&sA}LQRojR7s7AY>#w}84nZPlHN(%jT3u1d} zZP7WEg$?;;|yD;av7`q{Af^OuxuN6za*cOmVrHSd8k_uv87`a&%9Y<$5XWCUz6)~8v zF0LkzzJprc{Rl{BDNtI<>;vZ{b)`X1$+pydx+V-8q_u{O8AqS(GR8NjgnpDY!L0Squ%Ry8MWX3DruIFfdO0a3Zw}52Ca|mWPpOlBJNMH4JA`Dc(X16JZ6oL|n`n{*v}po9RIa_6jVlV& ziIV3By&fM=9B}zEGordsnScpZn4N_~D;Rhdgm%gmA0BMz7eq(TMnjXJtF>oQ8991j zmR1Ayzo5tqO;!q!q?r=+$-ZIVnD3EES_8!7hsW5#P6Pn4=|g{=g87e`=@Uf=bFu{` zU|!~h9uWbLqAHAz8GPw7g9S`24y_h-TQ zr)*%#nu6K$hWUmy4rEcn_^{GByNl-MApn50jGuVZdJ~q3Sn8o#5*=;x>d@fi1N7te zXzNX2kzU=(#X?haOFcA{u@w2Db%Bmz6D!oNd}K`#iSXiDL;C`A3MK_nr9B1$vW9F9 zl8+O!@WakN`6CXRLg3ieyVTREjnD624ZaPW4LuI?tx)!!M5Nx2K8ij-O!srR2N-YF ztw_0g8bOdAqh^>NqX8(CBR~rxunYApD_XAP@EBOLP=EnCya#% zC@Dn4&{=E2UZ6q>F~7xFaHRkMU^h$#IZ`^fy-5?}(6TOnkc>Q#mSrskXh?R6gw6Z%7yJ2m_$_0T=MGJyf zLv+~@;*B8|mskRt6eM?zjNis0wx+)YQ2`6_yshW4n6c$9KSj+A>`|B`2%K&NYbjjF z=cO*pz_1Q9y(ArWgCKn@{}^Eqrs=?gPxhZ0eNy`d)1^DF*aau~ajeP-4U3cXQr+f0KhW*jGDoCowKjCB#j^L*iD@<$c2^ zo3Atdw?X1&HA(_sQSvJxsRK7v)CUGAP^+;mvaMnEtkha@Tbl$d z1O|*tD@`TIn(u2_`UWpsL!l+F!5(;>k)(x*^Xw=soA`@^ie#0C$+uS0t zo7CEPkUUZF{9|SB#X0|~eJz77t{rCZX$Ek*8pV45$S^22J8lxRswNGdcE6~*R83%o z3KBy+T2xP#>`)~Ut`QkM)8@4lW#JhN*s2A$Fa^lpO0g-ig`0$8ihiTIYMXAc62r>u zH<0Wa7YMT{z*b`hr?>)}P-xg>1|gx>Sip72?f&Kjb|7enZE*`hQ%7o()|qx`9ZKjn z`P`7o5&cQD)=Lb?@9L&$Llg_DMHK5#v(hS{E%0^7VhCEoGiA>AwY~zb+~pRt{J~mmWZaLO;ex 
zW^l>wqr{Hr3Ga}^n189H{K-QZnEp@fe{3iN7mk~$?JH%l?qjab%#4SvhZ>DSfAyiv z7`zBij6iF2hceHNXLg;@bMCI)8Td#u=q9wYGXBiU=HWgMdasSUZ{NP545(*z&gYmM z5IaGBX1s`RoM#9JKIQszy3+FK_I0fflQVMdKO%-PS|rD-UpP!&6OX1??L@>ogPStk(SXrNCWAPuJW30en zi~(gy=%=;Di$Pp=@u8k!uaTNt&=bx|eYW(ku{u`(ougxqYQ6q4Grxi$m#iNz$Pc%~ z546M&YA#(aEw6ya^_~7ud~(S`ZQxfK6a4K)%qRYS7ZL8~Qwwll$q!oFZrFB+LFdc5hu`ijr5s&bM2gAB1!9B_qIoPK}jvW__!*l#@JQ`ZaG%L9ChB2Ul za4fb=Sv|EA2JkOOYlg_X^F8f!(Y9qnuE2R0&Ez^Ft8dr9S5S8#hfZZ~>tM4X+`7a8 z9+;04@M6kVB{m-vTtrBr29$nbd8iNduT}%w549RFU{FKMSJX&0YIN0NP!YwvNrgVF zS9dJL{O)CV1*HutU6Qu3dRte zfUb4;6<3n30k}dS1FiwA8i0sQv<)kLWKV z936#e{UJp3;|!ia5XAD>%P-;#2WdmRt@=sEnosjElR`dJ=}LSgvhTQQr{qvac(B85 zvofeGQ*{(rK8*-ZmEqJ_vhoPelOeVPcBmXk!fprTU1d8hW8YfD#<=X)8jEnnRN^>a z2FYAISr#1~IfKQF(`)B>Wi|h7{XD6wGXCXRw|<Oj=475{U2wK${!Dlf#{*X)35{^xS_v|+>tiouFv9!Q6d|@zA=t@ z^o6KYledfqB$4}=^ z9yxR7_^DCBL68iKUOsi=G;04LTM}S_a~{XMCOrKu=Cq?tx{gAD@n}UDNgFZ+GUtY4 z^_fz3o3X`ns_WPa!TbKO#hF|aTi0aei6Q+*47lL>j~VBbJYVA<`)1&%DQ z$tcTxZq5#iHmCww&Q~OTW4Ib z^x+P;VA<`)1%9ruj*DA@3(Vx&xDchk$uBmni3^qNmAv)uNrE?u{!LplhK}VPx88;S{<;%|uY=`x0z2ytkEV<5XB0fmC4Wz!hyHUctLP+W^j{+ywBUt}JLBbV zQ2VQE;DzOP1TX83koPNyTJZ9d`$AU13mSLE%iF-i@2!Csmfs1ytUE|szv_$^Htvj~#Hzaw~Amq)jbzwl3-)|N+dSd+e&oAq5b?u?gzM4M!!vpvo7tTi(7zU~A3Uzzba zoR`G^%4nd|&=}ItJmaFzF?fjq4-Lsd!rVakmls1gUtRyVDEzHO4CT3v&7zhcqlZTi zcgjCG4e_6xhU)+1ga}Tf;G78mqAoATCpAyWhJPrvF>Eo1lOx`cSGefPp`GyL4-Wwz^X>_+`S}V(@V^NmIS}R)1mNXi970a|B$=ZY~r>Dv8;q-_% zRrlyJp|ai`0hmo;UH0vKae~G`xGER9+HPVq+d4(GH-d^wb#yfPG8C4 zSTSG{Jgho(s;ax{eCM3H7^PBP!smPcd7|~|f+YQdGRaQ{nREC#LY5>`GUcw+lbdo+ zX)3)`GbIyG>8eeY|I^Jh{!`s_FVoBfW!awA)Htnnv%Oq1*ULBay+X6lD>jSFPlI1- zmU`u8xi{0CVP2+N>FsFl;B>aTv$w0ci_=vySJyg2l$uGuM&K!p~%+ z_gQ;4_CJxD2f!)*Oa?81DrUJu=^5r!n6slJn>)>2D4YGubo1c5lDXU5^GGuHSi9C! 
z&F7HcYwkmSALpM(e!qDD`2(Ckg#1DCIpm+?{0qoGZyrMa5a+ANzhG98uX6rH3aNEm` zf#`XeD|X*9gqOW22E)Osiw<_2H;tid-?h%)?OYuUx>VtP<9?WoxM)55Vc%XtGe{P1 zEL)DX>Rd4%SfZwQySoF!)XfJSVGNh`p|D(G*nRKNRJOh(20eXMbiKmme%nItgZ|CA zwUuSp9Xe-@A0HZ~*|m<@J)>jwE%#V&U|L=0Sl{Sb_0~$g)wWs#r`{g)jt3nc?^=*F z4e>ztWsfJ?jh@Cb>qB!$1LV}wYkTdmYrWMSv~TNO+i|~p@}q4E=C&(%bGw4mAFb`1 z+Q*64KdPlRe~$Uxlz^_yDhL==Vw?jJdx*?})YMHWh4fxxLtgN~+XMLy>b{fz)EKfxf4 zvLi`X+|)=ODWlX#9i>MZQ#O^y#8=()NLiPW%DCAL?TLg`&Xmx3s&T27rFYWX%SOKs zCGv{U`Z!m(Z3(b-$6m9%jN=O1w7e|opWW|xnT{B&4jnJovJFQ!2vvD6?W_(h;c2>V z_ib0#y_C^*1=&kaZR1TvmXb=V?z-tHQXqw7zlUs(%hZe3-t%!79fBWf9rb?jt)ZaemH z*zP*F4AEY`)iT<*TZ6uJYZ5%SV!gRLS06qQzk#k#7i7s45k0 zUZq;o5I=`JmL?Yu3gUb%z{*FDa=sk`6Kyk^B2;|>MiNyc z8CtIpsu&5t6gcSwhY*!!JY^vjB8sGv^)ir>31M8*mm-|yCU~0b%Mjjtq~)o{5#cy| zh5|dtQo|267*S;I#J52nT;s-u#UJyzg? z$;@6ztHyoXfkh-F39?S&J4Ak)$nOwo68SEXTOeM>kK4TrhQDpNumd~+-y#8O zqZz0I=HDwOC){s?tzm2u$s#<(&)EkOSOkTw0#j8kevwhub|!}MnP%YwG&G_n1GDg& zAkNop7A|ZFcp|evGdbNX#LgCwM+glUsj;ab!;k^I$(gEft>lu^0puCXK}|%y4DP~X zXo#*cxQmU=kKqLwv_6m-;R|?*1nLR!;u;ZxIl`$3+qK*_SSE)zp#|jscp8Iv%=fTt zk>e>PulkSpJPyNPlzPn#bHgclJODJNhR?i zK0lfz4y1;z&g&7ehU&*$Z)$dOa@1KU`x;1$c@;isne!{ASu{~Kih5dLc;m7BnZRI*>?iculR&HJp=WDid z7q|E0KSpm~ZslOP((K2>V4kepxD_EUPMoCW@&kMs7N`_iE(-8Tj-rCFH45-4UuJKg z{}Ow&c@7hhhychuiDe=bjIhfWw<*;nVuI8P6UHU99`O3HU7^fLujEH@&;c$R%TAQe zjSIiQD`sD9mZ<9~p~BmwJ2y}`VI1O+d>qDK3H*s5;^K68r_pf~MEou?6Yy3LNK-h0 ziH(BrIEV;S=_o|^gK*W&u4q1Nrv(-{aSuI-`$QfPStIg$L@09g2P_8YWQfrWR#iMB z+k(=hf!&GbXkde2gZ`CSIRV6r&j#Wunm9|11>wd~L7cA{hzr{T@n6x~mje-?+q%vk zhb93rjjsF%12hNoXR_*wR|w#X5W}s3@{Im`Su>Vh~pv z;x69F--zUBox^h)(V8DrFqVj2=*Q2VaiokU;<#jX@yNI^}q}uHR0vK$&t55EtPA9>vCGl=Ogc5 z2)sYF8=Qqq^ZvznQUwjL_#TnSa>SD{0arIP2wZh4xI zs4{#+Rg88K^qX_WTj(lZ#V5&S#(B|k&Txu~vCXk$cg|EzyY1G4Ty?NCR;?aO^QjF^ zd(KpGYJ2v?>#x6Fui_N73RcS!RFMyMEGMoLo}*gknd&Py#Hv*ti0UPy>sU2P4Gnjh z>cwLI@n^mIon^~iMx*YaN++;YWD-(UJ|y+Kxn^NYte>m-jLXd};ueOR=)rIECPMdW^IU0#iUJCHcJwO!lj;&hfS85HhW zc4yh;lUPwB5yhLqRRbun4lLjZweNVPfZ@1C$8u^f#J;XsvBN_#-QeT?B&WTF)7ru@ 
zw{R@x@bQ9Ij+=LI=q8Ti3B4Sn=nC!@c(ZYNeB;1NV|%<~d8w||=j})@V+;vYymP|~ z8xE@2;Vp$?+91Wj*OrPRuJ?&WvT6Hd|r+1?^=mB zrg)zg2Fxq_Llq98m-7#pVVA@oQVV}XEy%V%<2>0s?a_>*OV_+RErS1dGTPTrUCKmZ zCIU?jUSW@1#tyEkl$D)Gm6h>)g}D{Tp)5EItH|nSS|&Jp{&nI^2sV4V(ESuG@4|VzyrwobzrZKd z*tD~33@z*r{xSL#kBR(*2svI4yM|mWXvahR8A!NW@-3vBGZXreINCZ*72X2LD0r3e zGqRn$o?e^194R-(BRtbiiFMR!H1I@kQycFPIS1lpb=@4aaY9YIvfQ>fPpR0o^OB5v z{s9X9ndXwtT{KxO;ka6sJ}sXTRMHL|FVZ~Tq<1{#7m*m@^4O{*^8Ifjr66_K#T-!R zf{}$i=^t`&_U|g|ia5;|JYb!{)m+ttHU0tan84no8<%dt9%!qMr8`d7T>EzH^Q!KH zp;fh=YJcEXjk|{3HCkP(UcJhfA*!fSU8I{G{mvpe&BYduf-IxI$o1;jrKREY^0-sg z2M{obUJBx3hPcIRt9{q*Sqnl8=t@;S?$%4Qr}T3CN4J^82C5{+kA}{x=zIk#uD63` z&|iFMbaX3MFn>iH-zr~-zd?;lW6K>HS{lyx@N?+qgDbf*uG`31X24n67|2iLAK?rL zLnpzMtL#GMbrpAJQgj2xSi-!GM>99)rUWzb1D>hU2%TM`FoOmRgHf;AOVvde0mz~s zPsGv)LU5tVqU$TKRTsGuhM9tod$wc6p$r8~;RlVO(buXyOoQXcCP5^#eVjsd!`Tm> zkOQGO*1`E>zk^`RaqVu`pgUz-LZ0fqWh==&OEJ|~j)N8>q}`lb^aCLtQ1R!OAF%@j zN=3l%@{NTX`uqotOPAmEX5atd;=&dE`i=ANE?n2Ip1*Np;aY=d+{^n#A6~n19hXo( zp;k7idgeCnuXG&o*Tji&$&jRO4n?YzC-qU4%;!o<`E^F!Dc6)smug4p!phProp}Q1 z5S9d)L_ta={*(x>2S~rXoPT&%@Auhn@r5S7%H+Ee@-*yy{Cyj;1I^u0m3qQY`wr(Q zUo@)IMWcFn(Fh4>1g4lS4(eUHIVkAB!`tl(uVUhyx?|jhQ$^6q2xM@IzoXutlBjyy zdf<@lGWd4Q-%>%9$OR&FoHx~P(roZp^YkRfl1G(+s=Wmu<7&^_1bl~#ie{|H*@ci^o^R>Sn*0jG;Vfe|T@G??j=$ghfriWTfuj(zMYP53IoK8F= zG+X&I}+7=CBb~Vxu3L)mb*y)Y$m1^+2ya#%J$6{*hLF;uVceu*o|b zo8*(5x#~WYr`R;g)2h55QXFP(z+Bx+!xZk!Y{oc3yYzjrkq+% zo@IVJ4c7SCwdUnG4v${|bM!SoXa!M|n))FarK_tv;hp4?f18W4A!ozbXCAxV6n=Zv zYYUzVKZxX^UD;kk#4WEQ!u9rQnzoaLW5?P)VKBfrHfwHuXM zo!8=|Qjc55x?>y*IV71cZhM*RvDf2T5b`CSzB%u`Gv^&Y(Vqrql{RbBf+XwlqZ@Je z0UzNfp4@%J8;|G{d%HTcoMlr^?pSv@@deh29YQBh$dMr-F-t!kK zq=VAPO)kLp639v`Nkzc8ERey#Af?q5ai^Wgky|`yuBIN$Sjb{6@Dr6SJ2Ibi+FVH6 z^Jq-Z6BfQhked}H5NX!20)oC&8ns9i6;JT6gViw;9yVkt>a>RD(+I+pi)=n4RFyFj zae(?fNbMbBXEVdc7Co;X`bpw>TiOGDapBr!W$ov|JBoq0pYWr{E6?x6Znx`Ua?Ka@ z)oV4sezO)w{Mr|-|5{5~KU{~mHeYGq7EfauN0E~KAX>pO41M@@bmJ4d=;-S+L;GLp 
zEm7T|M%{<MLs@9V;+8MiITR8bMAIGhu1^n7t}9j6#Vka&EZR}>kDezTkDOp9q~pL(9vl3%s4K-U<@F%*hwmLcYV)NcX7k<>_#xFDL5X*qOTz0L#MHlRpZGCo~DuyYH(g33REo_AGwD7vVi zvTTU&;YXYza*W78xs+a!{#{2>dX*{n`u<(!($#Qre0v`Q1wDz1SE(cN^|oG`7R1@1 zp8ZHbEqm(8bf{%fDQTWrhz2GrFdIsmSJA+NXLe+H8GpV0a8B znwrdbUsL9)qHbn>LtI2=L}jy?2hZwCTG$bbZv919cKk}}g zA`CKZA5mRVL(!a~=05Fx4Sv`9mGQQTX;{jYW@BGKC)uRPRC_-7G;z-mQJz=%*q5kY zS7we=hEbW5RtDs^bU}4PVbnPl21$w|Li@}wQIkx8SOw#OpW!u}qDVT+{Ot}r2aut(GOQ~`NW%8lnqC{nF+n>4$dH}a z1i^(NEN3+8(pE4}U|1_d9Anumdb3mOCX0P1BQ8@PKO&+$?Q4`%ew%_?3n{q-qFIh^ zby1XxFrV7?jK0VFFKcY9VE)^29DNLJR1Ese&Rbbz`fPn_=O*lpxn9KJy9OZbUJr9= z_i}w4W)5&z7M}7M5_<#Ph&PG6MT8*6p3&+zs7y*AJt#(iY<+Olsr0~@Db7+q=RlU0 z5Z0fk64{720U`^Y$KpER7A>X1VS>J9C_t(uSF8OpDkzjCV<5I}j=&`8{dDX_q3U{J z4b^0$8bSE;NC}zK1`Zz`O*qku$Qj5TChG(q0v01z5FJVZS5srt5Z?hdLVgn$ERhYD z0%!9J*fDH0zqEL@tV_ECoRTCVTmSyx%;%=J+uRKjH;Pl&U-N^IAY8@0tnj;wChiIy zPNL?D8w+=(24&@bw4&No%6T!4SxN^N77_F@s&koxxS)@fr7Ioie2ZTcA{IDPI)hMIZTi+*PSX8^ zFiom8u%Id6>^_akG8ul* z89F`Uwy+r)27#AaB@97$WTO*l3r*TLvLq5iXr}bmiV! 
z<-^?5^hy6SU|HF*o-1DlU7FZCJb!#iT`AzqrQB^5EHMGHWhzsWGq3;f!o^G8^3}7i zTwL}ppS^nZ;*}+(7t+b9-nepU8PYzrB!!dKO`Or1iKr3hNS`RvrLKREl#pN791{v> z{m0B1pIK%J3TGN>@Q18%Ki@;+PJFF!boa>F!%?^sUq#>klIfe^*nUCqSTu+PM82Ra zRQm^{y>*2o99}jB_!PLFAg=+u&rP@Lg|EIPn*xrw2 zrhjV}&+@fXd00;6OZ`a(C+Vb>`I0@|2#NfY6oX>V_K-8BmyN~6ZRu&cB5ZVUNWm4M zpaY$t1H2&ILnkWw^F(B>oy#cut{11sugHjY-j6nm76F zgpjqbE;7|oimriFWLGg|?l=PB*5UGUR cglS~YCsxV++%)an#FG>ICtkHjooRai3jsu00{{R3 literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/mobilenet_v3.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/mobilenet_v3.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54ad9f4f169f065c53935f184db4cef3769250d5 GIT binary patch literal 15979 zcmeHOYiu0Xb)K2sot<5-C`zKZt&RPcQ5Uk{J%8&Z^ir%3qeLgX~Q-m{vb zFomhEQdF~QQOjz@NH(HUn&w8cQTZDy>RDZ;McimHo{h_J)QuI5tWiv46UAgUS?tJm z6jRxh%!|1jik;a`8P?s6#jb3Z49DF~#m(8x#Vy$_h#M@ylB|QJ*ap_gHnJ|ZiEVy6 zne84|*p_!x(C&9sw)Jf-yVdDl*mhgVZb#ZSnX_He9g^;pbXSP(mf=SveKbTLli|lD zH6{IAlWs)YPsrFFN%u&n-3kC_BQQy`^LyyP&Y6?AR@Z9do*FMzW71ew>{^{Dh2~h(E`A5$~1p&msPE z?0LkWm+>bMe}SDu{G^QUVW-$>w7ZwR%FeQL=;=xJC3c?mA@mgcGV5m-5PF(r*+upu zLi^ZP*caFr5lXYGY=B)t=oxm64YHRI`aJu2cA345(0*pIAvTQA0XE7;*cF5_OdD3x z_9Eda9T6MtF|X`aea9*kD`oBrgB7L=UZGqPofTdv`BuJK%K3;7rd5%+Y;(JK#`k$T zCN`a6cEvADJ7=cHFPF=1&v}tJYx}v0!7?uj{am>;jimT_Uapj@zR(8>CCBDyB|gw| z-pScB2&P6R9M7qG1NMx=)0)`omTe4rW}MrV39G^#pWB6!c&u%*mFMN6RpsvN^n~wM zyxxNcD>h@UbD&VP$DNYnA1IcY<9Y{5cG1a&7AL$kFCt9G0oD3h}|4t*&>!?Ymv~=^k(2?>bNSuyv>{ zm)Ci^hpnFW(_OEAU7L?PKLb=X`~4YB_cKOSeY&4Ds+!%lM^!p0wy&UK#J08>M^~tD z{Ak+>m~Yz(1oNdCO*|Y~)>1QLr3%ZtZq9jG*Uj{vqv_};i7~gFoASPL=vp(_b8WVf zdi>gKvR>fup=;?F--#WH?*eHSIT|W*I23;N8n%v2_b_j1k{nA~UyPu|MY5pyV<7z^ z${n}5s3M%!`J@%`2GT8SiQ3h5B1O zO*{+bnJ5P6#U!>wOqvS;6OJN>Q||>OhH)4O<-oY+6GceJ_Z+_7S&*8>l&Zp0HFZw& z5%blWQd60_TiLGMRBP%k#JZK6+FYa-VOmY;R;CnwvKFbS#}!J0Q5PAMpc((Cf28Mw 
zQy5vKXx|VKm~rW-P^U#)8h2$b62(Hvnzr4lBVr}zx;1@A;ioW)Pn^EqtCyt_bT%-geug|h!XgXNnsQscslKjKN;*0i$OVb{?0(pyrVU$n+9rF~Y#6>~!81!`-}I^# zvXC9tYkqYO_ODhdiNwW*yo%_MGjK(uhN>mMP;~k@2b=XgOkvxHp&LQNlEVQxjSfYJ zmE(>L(;IoyNboj;5Ca;YsHUmLU49X%Im!w@geZEvUJysgVnSJssab%7MVzP;b95GE zF&4`k-%#=@(^>phA{(bu8%`QXiz?X!SdbJQA;oFm0>YCfo=~EUpW)+PuMAOcu3X9& z#(Pb-;Q5(wS(7;xe*y~3F#}~@FPdpq^>ViB^qP5?^!^KnGlw!J@jX{|^7%rp;FNsR zpRj$Z=JT@aA?BD~Vb(Frc@qG-UglG#G`}6+;a0v3x^LKs<(}# zF^5xqS;X;LgDI3Mu3d7xdX;0rU~pWiR564JyHs*qvsFJ|;GP`XG@Y3UBrY9XTI~g6 z@LsDr&MYQL<9O~^eHbgYT5(EM7oY(YXxhu#UOqwRM#U}xgK~~NCsu15OlddbR;w&H zpTY7jk2RL%P^KqinwODc<1ke4XNY;8?`r^DV!L59Fs_-MI&G*0i9eV!Z)L^b7 z$dHu6A76FaaVxZBQ4jQY)LG@Y`H0~Qr^%HL>lZZ`5 zpj@GDNqX=h$2e&>zcNoF-{N;Hbl;{Px7G}76)OK9e;Y#Z zz07M!-Coief13%~EZ)vYZN$qpmNaQ>NGb_$7Awk571qPiw zNSQAYxlE~g)pIP*kyC-%PYpRRK2Q1--igY|KSIwQ*`Ru2^GBMxP2F}^(^A+Lwczv7 z?7onO&RA9fqp_^a^Dwb(*E@+oqp8Kac;t-x)4J%O^$Zl7CsJ}*8?lZlheO}V)s<7{ zFVp-JV+Gr@Y&d&sll2b%~_{&SDjam^=5@pyMNzb>rZ`n5ro=A9pxgvwe*a%k+ zRD@MQJLts|8|n$DFz$HiN5Q&k#o@weOoe|LopWg!zDmQ`utc|tR1gicp-2QV08n(0 z{gkF)5rDQf4B7q5!pqzMJnTmM7OA{@Lcs>Yl(o;gq=LjrVDULaJ@G(l%(GoUO z(HXF!!J=U5K6OCz=OioKqYA08%H}0XFM|M33Cju?VR4(1PZN0!!Da91Z_zg@iOY_jkJY$Gi205 z>M)ozm`-rAw)hN@St4I0QX?Wy7;_Z*8j-Jq+|hz%Qg1A`-{P?DuEYNOHoSwhf#a9(GN9pk7$9(8`?BUUQ zH5_KKc^Lgx9J_#GVXv+k*b}g?%L9Nu*q@Gb2i@>Ik#QoV^#j$X2fG<3gi@A#V9j@M zXS|9-AGSe`lU0f~zJ^kj`-DhJ6IE(WSsTJ$LG55kNQ(p*j8g~)M1|9yD73K>AEwwv zBD8mcQ=}y1{5{aW2=BPI4{T|1o0TUJxZg^1I~ov0(&pY%QM}FL~Lt&bkL^=((h4fmCj z5&$bz+NrfTuuiCskp$oax$t4>k!{3cyQJ;IA}9q*7Nt;fGQumqQG@-8ctX~S)LV-) zrQTW~8OESwsI(hKAR}ZTwFO`cdwZzJ07!or?XWoVmN$Wua}IBTVia*CV?{@>c?TOU z9b?7Dv9j;Gcyp7RlBvQN?z<#4qtMAV^BmFP%GXFTJ8gu7W2DUhuTa`ba|&c$eMQ+I390Xd9~lXP;1U^9wU1}FhK<7ZVnfT^^j%y;ggiYfiq5`}=qT`7)VI9J z7(egp*xG1ggvnfMij~2C9c}WriF}vHZxZ<}BHsZKdhlutjtIu?L;}A3B4rq`;b9Q@ zGa<=1P1Q*Jo+BP@MnJ)?Pn#Rfo@ff1V#%Bva;OgSU!&UX#EfJBZ;};{tSZA$KJMHn zGYyj_TdAWh z`tIzO@K75otV!PT@1d`im-=1g_ET#Fc|+}taY|X^La(Mve*;Y|Qp5MGYaEHRzTPNE 
zYh8WlOsVq%4GJB)3y#A;i_XWe+R&Q|@!QzRU?E5yOYYVsB@%wJ?iC<(bV%zV9@3HE zzJos7nGqp={Vs~6Qz27;Bq6XrX*&=xI#nE&>&VYj*5l389?3!edq}9mPQ>drhe&XT zRpndyb^sirb=Zq~!5&J=t8S|Q)X+(p)nlrL`0S>J%59HL;x4gXVv~lrvKm{skM-`y z;FCZ;kWBXxw86B-r8-h)u-Qn1{14E0@FL|8DMSR`LB}dRdoX*M8F>*&E z^aLFG(sG3l0v?yrW8~cTwVN70;lnCaT0hFjX6whOKeA9}{TES>cxs+gnXoGFcunq_W|j^XI)*ricfF9W0!YeW*)JW4oo zmU3`dpg7-vRNe;ylQ!Av4Q>&-MS*t)^h5V8_#acrrF<7%mb|3Vs#hYm1$&19kl~yN z{g2Rop7i-tU0-68GJqk7aj+4O)qcCyguQCQ(vGd{h$4}qbqQA)Rg0G@6F}M zv7qng^{V!e+-IrcE-#1_lkDR&C zH*8%#GcwXQG}z`0JbdZQz<`98NCs7|3=IsUvp=EM3AaL@!l8ZK<9|viOF=BV|0>~j z2QrmpRH{a($4{d%?Wej9_uJj5CQGdoc<1j6-lGq(OZUMX_8_r)bam(k#ty-j|A0sr zkv}K$9+71rqTDxE1re26ClK9V10G)!itx>H1_{b10G)kjOyo+ z9|amPfYK%c(27u^-0!UlB`UQ}P`aN!!cC)x`shLHBQSLh`sfRUnwCByD{EyCQSJ{` z1re26ClK9FAJMh6hbVgRDC$|eKKe;mA8olWlqmO~SA`Ol`YfSTzh+3+v>!t0fk7$6 z)%$BCI<4_PBhu1H-H6uhc>WiZ^_N86BtmCD{?|m_BeKkRqKf~qD#EDLIw7p>j@RVH zLx9Qu$ao+Yc#ElPz?XD4K0<(z7Wh^Q{T-3_h%5sU<$k;>h^W*$fvD|{*PQR#Ebxau z2)LWt-XUA!g9veT4KVr}LQ4xq+bFssj3~ELTg@9lrPc{X_tQlWt?&AOhdoGLw5s(j z_d02Pw?MT0z96F9J*$Fza7)pZeZbq%`cZwWUo7?I0yWnDzM zCsu_KmBO2p6ZpX!mHz%UeGk$FqdAm%Y3d$LZ2eaLa9Ty>#f3OmMyBDfzzw8Twy=OyhqA5P+u~K5Y6oP)lB?<|*oP zYfu>1tX(hT+0%}EzYx5np!XMn2}x$9h)y{(p1@rx`fp7uus6R&MgEG2G&151 z>RtMmgDXg{Q1o3Q^kOG4PRR)!`1quEMxL(axroe8E=>D*g`iP`mxZZ!y^MSro*Yr>IxUr#mNe@owdp;j(-S?zXsh?;+ f*7rm;`H3VGMyhMFYj4-Fu8yupjSb1I%Rm1MvgI=l literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/pp_lcnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/pp_lcnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b2be9d8ea02912eacdd30f9df3f5fb67f6f726e GIT binary patch literal 12099 zcmeHNO>i4WcAgmw1_KbJAc~YkNtR(-mW_=CNt7iyT3P?1?OhpGMA{o8+zf~4Aqa8s z zKu{*QN`;ay8nPc=cfaZ1uity!J?`LOO2hMq|8{cr8|O6bKPl4tQ0^s!;BR$J6PnO{ zt*RIGs!=qmv0_Z8G{ZNGCjZ8(R?*_Lm~U1S#RT)FAFtX)yP7N}tEpnDI#3*_ri*FL zi~EDsq2dtpmcOf-DQ1{Y_`9pa#bM@ce@}I!I8q%gjv}5EDKQ|@Vo(f;T_PiPe`FW; 
zis7;*_I#pO^x{5mzqfa7>|B0v_?q}Fl=!STFJ2PIz&$5k6?t(S+!1j>oDe6$Jufbb zad8UVQE^GUEKY-aL0lGR#944JiYww3@!Q~X!kE-@SJ!Dqb1}8coek=KGxVHFwNaO$ zvPEUF5>)CnHPn!mTIkF*Yo!qJLQYr7o33=Lm%~uz;%fJ0;Wom`qIY?*e6wEn$FHg6 z6*nx+7wWRAtgH3fB9aoNKUl?9Yc`BRF^bc*7OK z_l{MnZrQ7O;jwC6cz$rK=2pG@?Ctz)$(yYQ`BJ@lyj{m}--D!Z<&qQ09>00>#?^us zI(JVvn!asa*&`arvs>5pNV;~?IkDxeJ)+7UW6#DP zXV1o+Eqj);)!?QTvMGA%v`Y8np5Byswl@>A)FXP*>CHnwd-Bi_m1q|VHI$h3>!pQY zyu+O8FfVr)h(|JKa`E*T^xk@gNR|jpm^P7%%ftAUoN@`=x&hKneijkQv0l%zDqjNM>{F&8d|~9Q)nP3zd(6RNP24B1}#yKI*$p) zLb!?$TmfkrLThR=-O}$FA!4E4(ptLE4{G~h4O{vF#749g<6f*46GlrL(H1m$p%rWC zXEjQLt%w!45#@jSM|%G2BD8X>>fTi`Scjac^hK3mn^Bi430>|ix_;AB@tSwXS-h{w z<7mZa?)7uiH(3>5_1)l@^nA|^ykq0}Q~i<~jglWsyRtMtJ?oYhX6rR?`a#t^-Do&| z32HsxSW-h(cfoTa>ox0puh9skK{VagP5rQ*#;-hyc!?5w9~v}z0vcmaE9yck8p8TO zo706MV(%x5F=2{0jIsHFW@^Pa1|^{e$iQ7Gu%VJP9VHt0&O?iEAvdXgFYy@iHj#nwqhmH#$KqY4@Zoq6AJ>-Y#lwP^%yOQbzR#I#v zSxSB#GP$&h2hD~jmF+mxJjaoQW6A4*W=xqS46RI1;sC{s*_twHHI;nbl_I!P3FcLD zzQg7$Wd@!= z@}AztwWV@8W97a@GeI7aPQN4rh(JFbq-x)D(!#9u{ zjomo7n+O4|1KZL*AKEV|?IG7*g4Z5a9k0E(ur{qdTMb|H8sR*E;7xi2FRXXg;1ge3 zg9IX8gRg_xSc9!NR*Vrs&Ed}^btm*yN;6w=IUdGaI+Ua4SR?_pXpkOQHIb6wRc02T zkta~1q+%uM8itj8gE&$K_suAuvsj2=m$T&sk|1a)s~LDs;PJRXh2C)p^~#>S{w7a= zG6GtEQF6}WDetM1QA}^qdX?XzQhE5hVBkvLLwX0ly4A%mQ2V)UnS|la->;SretA$I z0>&?*hF%!o*5v`jI}p~^FoGR0%Nr2Rneun3wcO+)alc38O(NX*jUK#(v=0yh(xe1s zD+2R@En3>av=IS868d_|+kocV$Y>z&`mp0r$e$1Gm6ZCB;QS4SLB5Nk*2$)9vdjd+ zE*Pf~*#Kh*Gr(|z31C3NAP4}GK}Vp3Ay`YWlDAoc3D1BTFutKhs ztM!Ud1FVvx2y_;!s6^Y?sHF5nllPHZ!SshHKR5ys=_>+d8?1mSfwT)<#Fuw>mdMud zq>-lKzlsph(j(ue7ouBe+6|a$YnTH9Y#6_7!AO&0Frc5Ii*rUHXUGCdRQggbA%7p7 zbcnb_W{C(8WktJAWmlViSZVl6?cs>twP9FmVI^GZf#(T&m*D;hLXZK`;Iu@#o9K|f zoay$u19e?~8*=@TPC)z4I z?Ps3GXtO6M-vL)5Wzcy@6(te&AOz<@${56NzdQ}7Psi_JJ8;ju7r$qP8a52EsM+Am zb_Zy>ETOH&|I~QTBqwE)IV9~C=@`=S@K#8_6p^W?qrB{@`piJ^bRh*qrr3aF1 z0erD|P;z0;yH-G@&ij=h#1T#7!rwuJSBI6zd`2!<+kkz(e5Ac0?5c$Pd29<^Kezy< z^ST_v`;TrIWF5szvMrMAuDna6+Yh;n=szHA*#UU~IbUhh{-tlz4t~{5`}e+0JM>jI 
z?bE(ZdkQ)13a)>FN7CY(fE(A(6!YFl4U77n8ZQ8BjqpFmTPVGuOgfCD-Bb{|65Fr4 zl|Utfu-;%68M|;a+LSe?ft?vPa)BCbC*Xy$7v!GmvGhx5i%Lg39`A0$tqFEQd5J~_ zjSYTk(tEE-rponI(!++ylWyp#RN?xRbM>vl#2c^if@0`O0*R#Qklb-HDv^OCwXc%c zU16_D+d4AKyp55hf8hAG&xaEeJ~h3DO-#f{^7@o9S5h`_6D4|KM{dPX)TvHqC3h$p zbqMWL>2{$w_p8s!6{_)%iF}6$yMnwn+3J!n{x=B06o_V%lWDcLdeAPvvaO*+>X)fh zKT6xey|C47NBT1xB@F2qW5{UxEWcjPJgA%cZwgyz!vEb>!5i4Iv^S4dmrHaxyUQiJ zT&l|rv^lsqk%yj+KtS4Z%gJTOhaB^09zP`RdqloZq@O2`n;$J8H3uip=(;u=9F+O; zuFg<(d>l(IP?Rp$zE^#cE+Nf5=`6dIWb9DJZ z_apL$c;x~2Q)L{!RM~iMj3#Utl5~V-AY?E_X<8d!$iGDD#3mefhVz@q3GyJJHirQX zu}LB4=OgT*f1>|DzZb*#mqzDW!W_f!u9!MvG;r=8puSWVPa6c$jSHt-mgV3abCkc) z#B*61qgN+j=sg-T3`aiu#$0x0N;bWjEOn?yI?G2<9`s}IQg()4!a#@fo}{CMe7Ah6 zqw}CJ6#7!OiirrakVPTke6~Ip@$CyK%qf|`>lD8s>4)clCuJiNyUe@ z!}VHkGr6ejoq4Y}^;POpRQ@ei^*sZ^FPzKPYI#16%x5PXUa2xyDP?Cijpa<#%B=7j zxZ?`4sMf{EMx~Z9XL$bkBK!7m+eoX*RU)(@!HEW~Ej9AyTi34Na3-fNzjl4nxp{eN z>iVq$<3XjOthaC7n1uA7(VGM`Wi8-PsvO8arxbRGHoPJ#`X~X7P^_g)-Tnf~^b5-v z*16ipfU#4jpJ0q`RXy?l$JnXU+hc5wpMQ-oCjW{6v75+`hw^r+dkn~U zqBwR)ao}r_ZIRLTY=6Y!@?p@SUE=*sq&xO* ziw?^DZXa||-eW+=6Wy^xx&vR6Jq$X`UUcl+79EuP7k$t{dALJ-5kI&~oS4`)e2u=w zm}}x#7hhvYgDR!_Gf5{cavnq_ql?CTt;Sch(I+%~^h)O`7ULjBr;CqZ6fayVXIDOPJAX1Z5erI z!0sPgdRFMXZh3k(wvpYl24$beK?%NXxv&c*1@C~27&d*rqaR|L)-$HD`T4muf&&RN joyPqs{zx|cKSpfmzpcSkGLy|bojIKG?ZH&0_wRoJ8noMa literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/resnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/legendary_models/__pycache__/resnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e2cd638598eff56b69d6bfd1b7a9fed7ca588ef GIT binary patch literal 15291 zcmeHOU2NP|b|yI-4(CTdmStPE6Nj-K$CKD&NtPT(8z=Ff>|`CqwH3HKYnP(taz>Om zobe?`j?__iHwuuTU1hVyb{AOWK@PUvx<5cs6n*G(U)z`VrF<$-pso8$v@1MUt4r zWLGN7dAY3Qm2xZ}lS!tyYCbN0wY-Mkn5&f&`9vd6FDLU!!KxQ|`_8mizL3LZ-R>deyUM%syUTm>dyr1D6ic%V z>tVgDkM*-1Y=G@#gP)}HLnVpr`cwwp-9h&Ven`;0pUQ0CCt7~5J+!{>u9V-;_M`Mu 
zBJXMB9rz?BB+qP+7$_4-o^6yN$w5)3Q*!75$>9x>=g{+0mn3$CJ^#MMp0@{AWBI3% zKFSQF4Us;8be8Q$dcR0NgY*mRMWkO8X@ebO$KRLo&$5f`1RH)|&L3oxY=n)1JH#%r zlWYv!VRo5~vs2)nV{fpR*lBP_*qiKSHUaK=_HFhp_6oS8>UImwBZ?V_d>)>8s zQ|v4|2ku4oHapKQfIG&N>r(dWeL_k$7WP@Qe#NZ?w&|3s6&{2-a~2)nsd!;;l{;Qw z7HeK1Kzb@GhsmoJx5{UOfTOB*zTz#8UI>%tte`MARpI4OyW)7ZMVUL!GOHRmi}u;Y z($z}ErE+gs?>5<%6pbd{@th)RgUw9O*}h%#uUJbq&nn?Aw_-7qEtR-coinT44!Grb z;em&W%_6Uq%^G)?OLIX`^(Ri8s9KD<_Hn0dm2A%rj+ZOUcKzd?Rkm}p^SRlAJzMc} zg-ZEEqmL7=jgrjbOJ-E|#5LQWvV)P+t=?^xtFkK1gTFlgo9JkfJ+!axKh(*whj!Hc zheAi+*h7sEe?t$2j<$HHx$dK4c;ul^weTD1>6?43@zHPUvCz{NkJbG-939>Y8rgKH zHw{XdZgLn|Q+#r)DHb}@6ps%##R7F^oNp0vGF0VWH8t=zt%jViJboNT`9?qtg6&lI|gnk`3v7our31OUcu_KiraP zp)zthRDdM~z=@3jC9x48B8G=Apwxzo#z&V=-PlrIW>Q{ZsSl*0%rusGKbem)oh9L? zs2^Z_`8d3XbeN{K|J;;Vr|)BKq{*rOpjIU5zh4X0sq>d_6ohjc$t2>_xM^Da-vbGx zRcT$mi)pQEb;(rf3D8)eG8umgi!pUgg(Mzmb-Aw8V|BG2WAPz4vHPTYJV?}arme{= zQBN{`Es2tPkgO|BdGi;!NzqO!NUvw^N+{REBs@TBDthBI^69J^rWb6EH)s0JvK?xE zz#V3X3GyBsuM{Q=w&MzafWL$a@KbtRcsU;OOl}4CWHuS59k2EN!%U?Xv{HNmb@;c4 zyaEzxC0?mj{V;vUcCe0p7cW;`;+N5>i`yrakWwjG!=EDSU+ap3-{>zz65>D8QzX$T^`#zP9_)a2 zt~sj(vJ%aUctSG04*xVrU7DA8e;tUf%Xj4!8JyDMVoh#djB-yoSFzfF<^Aacjz?1xDBy&w{vG9~`#cX?;$b2Q4L zH$t!=lT)0=&R+((rxAe2EhYe&1fH5pe8x0y*DN>65fjV5i}GPl=ekZ3Qtw6yT@@TjNEskS8K}e1rq%c1s!a%_Tc|dTvM#SEP+I&p6#F46 zJ|E+`y0Q}UQYfj`6_ktx@p)RhVoY9Bgh5EO)&8C`l~s5it%vebR_6>Hw}}*q&|L8n zNT@~oSD10eB5R;Vvn%TASfsqb36@0R4jmNgM{4@_xcxy8Nmj*dq!gG!B_;Qgf!MQo zMw&)K?bIfI6V*E!Z!0+I9ch%G0ck1t=e@_fq}K<~Z0AU}*6U3kMGRV{1hBiL-7P(i zHS{=S026mjmpIx%6N#P{4?bKt01H(Eft6qp`{uP#gG*! 
zPox?oX{&~Ym7?trKZaJ2h1psw<`sUpr3;v9p$qCf7QGH#fMy77!V<`7iO428rlAFY zUPc!8h=^IO5GPDU+g=dLzrgL2%ilzK2gV0F9_`nJH^J|psOe4cKJoy(6Vx{@pZvR) z3oXNOh9_H+n2Jy%mM@lIiDE)5X5>&O^D2cuv6#h$W39m-$b?$7owJH?O;ZN0C@t8t zyyQ;^jxu1Ji%x06U``>(HBv^U*rV^7HtmWVKg$^ z^^Ubfi_9oko&o0)6IV87?Et=C&S=OYovaF7+#~PYj#lMUdLRBSlxu-c{#Bo z$r;zHcEKq+g~n_cqJV+D8iISiQSl55a~X{*R2FBQ|BL~J@yLe|UeP8(PLfdYsSW#@ zK8Sm8qqgMFz%Oij7j^kV8T*DWKg0Zi%P(c5#jfeT9Uz25WWw*#0EsbdnafNA$c18j$mMZ=LgaUa#3Ns*zWgf zlsoDe{RL@N{)nDoFxW-)nf{@~_N>vf?k-6ysbG)jGqH*>U65}5%}SaLtz?2dsGq2( z_o6nWKVRvor`Cspy{l+xUf(O#d)VFu1>cmjz zBi1MUu1qxdko;{uUGGuxs;JG%Us2nhs=z5lxqWCUhPwJ532h@_zm~Ww@w+ScGBl8rstC#4UDsCG5X$9lfcpz*Sp`t*8N(rzmE?2qB0lh2 zC4Q{jRxyXMXg#4u#ZnDtjF#ds)A@+mc&Oxt^D6D@^Kr4E59NhW_OTX!>xJP<{0@S{81XmQ4Mswf_YsR}hG@T>n)t%@#0G8>0rI z%Slc{p2liPuQ%Erw7hFRPqLO{ot2|ek{v_90^tcKa4Z+`4UVjHBvryb9H_vMnpMOg z5CHMRo`xVYw%McY9YGi?6ifVF>Vvioh>3{(JZyb5j*ZaE_cUW+A`Cz=3CBaDUcnAk zdde^^U>0h$wpc-^ijZ0iu*&OxU?Y}>-pCCKQ`ha=HS%*U7x4&W@f3;mvh}WrXM}NX zc_l<`tSTAzFi~j5-1f75(e7M}7UQrtnz-hKhKa`d%HN|##Ox7v@;e=`iV%@F zmuhy+zei=h4-z)QjAEjq(5vaNcZF{l%1$2Bz!oEiuugRde`4pflYmDpAm~A!KHNOI zGvaRu5)~m36@d`te^nJ={2CHUeXVL4W#~&i(Vx^mOJ$-^i7F2ucK~~ZLEO5$4*))b zmXu%VNwlcY9-<$43QDPR|5tikYdWD{E%$8#`2+D2n%eWY{U3q^(md9|_1InMM=m0Q z(mdjTSnG0uh?YdF(IVo_l8;O7 zpuZc8B=&Cotxyj+@<-+6-o|`4ecS}UMUDIp5fda#m?or9Xz~vP z2$KbHT=;3R@sd^*NOgpORdQTJP$WJzISu2b*L>UbeU~j?ZXN;K%pi+lIKJUwk+K#o z$F*i%J7-)K(L@7Pj2Q~#dZiho!i||(#6E4yn-TSL@b$=dyIw#O8SPFe+^9omppg{U_?rI` zH6}Nx2F;^}n_%IOfdo=f3d9l?C`^G9h|u^a*jN2R{)nOs*y`3PY>u3|ys84iF+ezO z-A1p%Q;%>la#|!dZXnuYoG@s1&P>8=!t3U=Q^*-_6pfi_UbANmdLpA;+7Jv z8|>{jexzbSXzitB)%I^wHHk%M1oeEeq-1^~>Cm`tpNbsTA z_D19+!GWR}k7Gor-2ZHY4$AKeIyM`Tr3cXQWG{obaCZRmNHfyLNZX9;A;@fLMkrTP zno*Vq_q>$f6`(xIjEs*yF(chT$@u8@W`yEm;-lWjF(X4LymuRPP<~g?@hCGQzQcY3 zknRBFk!GZgk+vDxOSsw6j8N{eZGb}g-vm&iPxPMvr3*lbrgD2@GESg)nn)Wc`zX03 zP$>7cZGb}gS!HtavG3#b$!)QQqgZ@$%h9sLV?^jMAdKUPwYr3lv#ZhfP;|-?CZi7n zbDkG{h)G8V;zRD}$TCSuIwlq0e&=_!O6NL`fyJ37|3hl-i1@%dM;}<{njct$K`F(Wy_v+(&Q9y_U+rZ 
zi`~WUd++yl>5q>WH9Wug%WUJH&uiM>sWAFjD4a!-pMo%r>7mxv>w4R$8|_>@r<2SG z&9+sy6rTfc*X?$`o^LyKr(LKQl-z{8ST8Ex3dh=|dP(tiINqM9PqZiNlPKqz!wRg( z#y&FZWmalwZ2V(A(CbsYyfuAatM7wk;$t0jQqi)aQy=SW8gI;8(%3#Wb5~5RX)HDvO{;Z`a$*rdx|}MSFaypFS2LYv)~T1m)H^Z9Jr_0cbLnb2lq5H zu4$E{+xT*=l1nH3hK$2*!o8r~iA9n+EVvQKAdb?BjtHW}TkS^81m#-AOzrb=bYt#9 zT6oJ(nrpRKv{UmI++1Z_;IjeS9#}|HE{@+KL|3*eR z^^J^jdaqHS6P8-eESt}=Q(1O8&7rPhrKKj+#%qLebKRTWRhrvXn%`A=byw;86>D3E z&WeK|+Xium2BcyQ`G>gCs_h)TaCks=RhcAxYCtOH_89o>aUx|R`#_pVyB_L~9{Cao z*Xlac>c)rKs?Ky~+;!?XmSZMt)BI2~wYmknv(f^Y&|9<=O&W9X5bq#Sm-0^_iMFY2 z>Gw5V>*acyXC%3v-ZQ9U{Lt+^8&b2EW7fJM-$1VSb9>zoOK5F8(%R~`V#ul0#-6S0 ze(mq}tj#A5v+#pcyGQdXB*m?<`x>|s)9`_XTDHKnON$jd9Sb6_x#mX^j5IC5KMYG_ z>s&zW$zX%0woF98c$z2s4x(0CXav6W{3H?Cj;g&O1*qi23&_NaL}=kF=9`?tIfJXUH%QEFc0m)}cG!NV@p z+mt+9O(%k=GdlN4e95Rld>8#Zfa~+TX6Q@lc@MPDmM<hwA@?OeK3AQ8 zoVe3zhH}{#&9&u*-&}9R5nq1TeJ$T;wW^)l;$@8Z8j`GmXnN7Gbz}7RnUg;>TGkCn z%8y8#hb2x~-hqRQ^c_jIJke;@s_4NDt9RnQnG~=#AU!j$AJq?_( z!{wlRi)aTO-Qx{vS66RPy9n*fJ=;<3Z%}JJTDxWUWdwC*EmI(6_oPLYyr>`(CyCI$ ziC2hxkH|41$3Y(C#4NJZRvRx<{gyaIb&C9eYVxbGxakY#p&+SW>;Zm96rE9&%Jkcq z8m?#mEtm$hl~4=XTJ#8-x*_o;&-18EpLbB@_xE)EKJ@8sghqo#y-(@lTZlP|HiNkkU%6HjV$YN^4o!5BcUxFPL zT|8M?vFAifE+~#FU|g#~YrzcznN<7D`oii?Bbme<4{-qVLJAJ#f{S?L%*m>Ykik`m z%oB(qbrp6<_vTvATtk`hTW;u!78h;;=i;tB+||l4_ht}=ZiBm$C+>>st*Sx|zY+2? 
zi+(8i%FawOn~l2dYzng{Pxfa4Q3Hd-tFCIoWj6b(Jn4#Pdc}66Dgh{{dsFHS;6eL; zSN`CW54MsiHqIv4r0R7#*~iLkYLiBo0YBB}_Y>bUmHYsG{q=*uG^=N`>3b?fhf40t z&}REARw-M_UlK>)vkm0TLWDqr>r&Xh{Usj6phcPZBJuoNflLcZb%?d-7NR`M5BIK*m{9;IhL~uQQrof?~T=)r3i$jA+3p4( zng0TcevSSC341`%7Q&%E*bHH32rNSo89;}&mEU>8+2IO1++MZCel5X{UaO2ferMms zM8y&jg+kYedymNbL2Dldz>=cX|aWa{7bR^MxT4HP(;x!fSYMTb6PGT~1Q~wpBYpknP zTU^4i(&Ng1+@9}UisoVm&E4tE3TMsec$H~b(ZXJ2)%sKCp`D})p{)wWjLShy<< zgnZnOR@8e{oCivnWm?4XH-h>{O`hh^T+mS!s2k))KU&UEvSr+ ztAc`pkrkvJOv|1+;||~ZBczgyU(+cHbM!J|g%Yg6_)@Gwy0|OmBG{(oB+FBllxVA3 zqC9zm=<2Y>avwp-|DgX?zmvoA;oLwAcNj~&Y0@cz7SCUYS5&YEC7GR7+-~F^PH(}b z^~s=;mJ3T!b>CifSC&MVuekIv?y&U;U>|c=GJJL8WQ~iPf#gHbr|4ktpfhxO%xz;6 zB#@j?KUpKRReb+$0Q;&IcG$;Y??-1?b$_zPX*g^%HrQo|JP-l+bagR0P1O|w2|g-4 zL!b0NL;H$a!<8q(`_tGhF$MC#5I-gUXGG{+`M^>plNM_im%Q`u))wEsl+L{K?uCn& zy=zP7E?vClT{*Y3bn$9U=|Wn}s@}VL`Pu_hP*A6oEVb7Wgtnw$BpKSQ@)@$RwKT$Mx1)GMm$R!Pd+Eph)Loes}WTDH>ABaf@;5RjX3?SMto(B zIQ?XeI6^8g)Chuu$7=-Deu}iWMo?|VSX|tdo}nv@)h-S~xWW*0QzYo%Dkx@23)z)@ zHHxwW4FxfR&bI1AqE0B;HQZ5k{Z*ywuj=6X3k;$M;5z+>klhmqIvc0^v&LP3ysS$9 z23OIQiq%vYC)R0%>(ta-=eH%rxxTtO38;eZO2k~v^;Q#;t literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/esnet.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/esnet.py new file mode 100644 index 0000000..e05e0ce --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/esnet.py @@ -0,0 +1,369 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +import math +import paddle +from paddle import ParamAttr, reshape, transpose, concat, split +import paddle.nn as nn +from paddle.nn import Conv2D, BatchNorm, Linear, Dropout +from paddle.nn import AdaptiveAvgPool2D, MaxPool2D +from paddle.nn.initializer import KaimingNormal +from paddle.regularizer import L2Decay + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "ESNet_x0_25": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ESNet_x0_25_pretrained.pdparams", + "ESNet_x0_5": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ESNet_x0_5_pretrained.pdparams", + "ESNet_x0_75": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ESNet_x0_75_pretrained.pdparams", + "ESNet_x1_0": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ESNet_x1_0_pretrained.pdparams", +} + +MODEL_STAGES_PATTERN = {"ESNet": ["blocks[2]", "blocks[9]", "blocks[12]"]} + +__all__ = list(MODEL_URLS.keys()) + + +def channel_shuffle(x, groups): + batch_size, num_channels, height, width = x.shape[0:4] + channels_per_group = num_channels // groups + x = reshape( + x=x, shape=[batch_size, groups, channels_per_group, height, width]) + x = transpose(x=x, perm=[0, 2, 1, 3, 4]) + x = reshape(x=x, shape=[batch_size, num_channels, height, width]) + return x + + +def make_divisible(v, divisor=8, min_value=None): + if min_value is None: + min_value = divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + if new_v < 0.9 * v: + new_v += divisor + return new_v + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + in_channels, + out_channels, + kernel_size, + stride=1, + groups=1, + 
if_act=True): + super().__init__() + self.conv = Conv2D( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=(kernel_size - 1) // 2, + groups=groups, + weight_attr=ParamAttr(initializer=KaimingNormal()), + bias_attr=False) + + self.bn = BatchNorm( + out_channels, + param_attr=ParamAttr(regularizer=L2Decay(0.0)), + bias_attr=ParamAttr(regularizer=L2Decay(0.0))) + self.if_act = if_act + self.hardswish = nn.Hardswish() + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + if self.if_act: + x = self.hardswish(x) + return x + + +class SEModule(TheseusLayer): + def __init__(self, channel, reduction=4): + super().__init__() + self.avg_pool = AdaptiveAvgPool2D(1) + self.conv1 = Conv2D( + in_channels=channel, + out_channels=channel // reduction, + kernel_size=1, + stride=1, + padding=0) + self.relu = nn.ReLU() + self.conv2 = Conv2D( + in_channels=channel // reduction, + out_channels=channel, + kernel_size=1, + stride=1, + padding=0) + self.hardsigmoid = nn.Hardsigmoid() + + def forward(self, x): + identity = x + x = self.avg_pool(x) + x = self.conv1(x) + x = self.relu(x) + x = self.conv2(x) + x = self.hardsigmoid(x) + x = paddle.multiply(x=identity, y=x) + return x + + +class ESBlock1(TheseusLayer): + def __init__(self, in_channels, out_channels): + super().__init__() + self.pw_1_1 = ConvBNLayer( + in_channels=in_channels // 2, + out_channels=out_channels // 2, + kernel_size=1, + stride=1) + self.dw_1 = ConvBNLayer( + in_channels=out_channels // 2, + out_channels=out_channels // 2, + kernel_size=3, + stride=1, + groups=out_channels // 2, + if_act=False) + self.se = SEModule(out_channels) + + self.pw_1_2 = ConvBNLayer( + in_channels=out_channels, + out_channels=out_channels // 2, + kernel_size=1, + stride=1) + + def forward(self, x): + x1, x2 = split( + x, num_or_sections=[x.shape[1] // 2, x.shape[1] // 2], axis=1) + x2 = self.pw_1_1(x2) + x3 = self.dw_1(x2) + x3 = concat([x2, x3], axis=1) + x3 = self.se(x3) 
+ x3 = self.pw_1_2(x3) + x = concat([x1, x3], axis=1) + return channel_shuffle(x, 2) + + +class ESBlock2(TheseusLayer): + def __init__(self, in_channels, out_channels): + super().__init__() + + # branch1 + self.dw_1 = ConvBNLayer( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=3, + stride=2, + groups=in_channels, + if_act=False) + self.pw_1 = ConvBNLayer( + in_channels=in_channels, + out_channels=out_channels // 2, + kernel_size=1, + stride=1) + # branch2 + self.pw_2_1 = ConvBNLayer( + in_channels=in_channels, + out_channels=out_channels // 2, + kernel_size=1) + self.dw_2 = ConvBNLayer( + in_channels=out_channels // 2, + out_channels=out_channels // 2, + kernel_size=3, + stride=2, + groups=out_channels // 2, + if_act=False) + self.se = SEModule(out_channels // 2) + self.pw_2_2 = ConvBNLayer( + in_channels=out_channels // 2, + out_channels=out_channels // 2, + kernel_size=1) + self.concat_dw = ConvBNLayer( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=3, + groups=out_channels) + self.concat_pw = ConvBNLayer( + in_channels=out_channels, out_channels=out_channels, kernel_size=1) + + def forward(self, x): + x1 = self.dw_1(x) + x1 = self.pw_1(x1) + x2 = self.pw_2_1(x) + x2 = self.dw_2(x2) + x2 = self.se(x2) + x2 = self.pw_2_2(x2) + x = concat([x1, x2], axis=1) + x = self.concat_dw(x) + x = self.concat_pw(x) + return x + + +class ESNet(TheseusLayer): + def __init__(self, + stages_pattern, + class_num=1000, + scale=1.0, + dropout_prob=0.2, + class_expand=1280, + return_patterns=None, + return_stages=None): + super().__init__() + self.scale = scale + self.class_num = class_num + self.class_expand = class_expand + stage_repeats = [3, 7, 3] + stage_out_channels = [ + -1, 24, make_divisible(116 * scale), make_divisible(232 * scale), + make_divisible(464 * scale), 1024 + ] + + self.conv1 = ConvBNLayer( + in_channels=3, + out_channels=stage_out_channels[1], + kernel_size=3, + stride=2) + self.max_pool = MaxPool2D(kernel_size=3, 
stride=2, padding=1) + + block_list = [] + for stage_id, num_repeat in enumerate(stage_repeats): + for i in range(num_repeat): + if i == 0: + block = ESBlock2( + in_channels=stage_out_channels[stage_id + 1], + out_channels=stage_out_channels[stage_id + 2]) + else: + block = ESBlock1( + in_channels=stage_out_channels[stage_id + 2], + out_channels=stage_out_channels[stage_id + 2]) + block_list.append(block) + self.blocks = nn.Sequential(*block_list) + + self.conv2 = ConvBNLayer( + in_channels=stage_out_channels[-2], + out_channels=stage_out_channels[-1], + kernel_size=1) + + self.avg_pool = AdaptiveAvgPool2D(1) + + self.last_conv = Conv2D( + in_channels=stage_out_channels[-1], + out_channels=self.class_expand, + kernel_size=1, + stride=1, + padding=0, + bias_attr=False) + self.hardswish = nn.Hardswish() + self.dropout = Dropout(p=dropout_prob, mode="downscale_in_infer") + self.flatten = nn.Flatten(start_axis=1, stop_axis=-1) + self.fc = Linear(self.class_expand, self.class_num) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + x = self.conv1(x) + x = self.max_pool(x) + x = self.blocks(x) + x = self.conv2(x) + x = self.avg_pool(x) + x = self.last_conv(x) + x = self.hardswish(x) + x = self.dropout(x) + x = self.flatten(x) + x = self.fc(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def ESNet_x0_25(pretrained=False, use_ssld=False, **kwargs): + """ + ESNet_x0_25 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. 
Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ESNet_x0_25` model depends on args. + """ + model = ESNet( + scale=0.25, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_25"], use_ssld) + return model + + +def ESNet_x0_5(pretrained=False, use_ssld=False, **kwargs): + """ + ESNet_x0_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ESNet_x0_5` model depends on args. + """ + model = ESNet( + scale=0.5, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_5"], use_ssld) + return model + + +def ESNet_x0_75(pretrained=False, use_ssld=False, **kwargs): + """ + ESNet_x0_75 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ESNet_x0_75` model depends on args. + """ + model = ESNet( + scale=0.75, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_75"], use_ssld) + return model + + +def ESNet_x1_0(pretrained=False, use_ssld=False, **kwargs): + """ + ESNet_x1_0 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ESNet_x1_0` model depends on args. 
+ """ + model = ESNet( + scale=1.0, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x1_0"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/hrnet.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/hrnet.py new file mode 100644 index 0000000..c3f7759 --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/hrnet.py @@ -0,0 +1,794 @@ +# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import math +import paddle +from paddle import nn +from paddle import ParamAttr +from paddle.nn.functional import upsample +from paddle.nn.initializer import Uniform + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer, Identity +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "HRNet_W18_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W18_C_pretrained.pdparams", + "HRNet_W30_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W30_C_pretrained.pdparams", + "HRNet_W32_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W32_C_pretrained.pdparams", + "HRNet_W40_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W40_C_pretrained.pdparams", + "HRNet_W44_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W44_C_pretrained.pdparams", + "HRNet_W48_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W48_C_pretrained.pdparams", + "HRNet_W64_C": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W64_C_pretrained.pdparams" +} + +MODEL_STAGES_PATTERN = {"HRNet": ["st4"]} + +__all__ = list(MODEL_URLS.keys()) + + +def _create_act(act): + if act == "hardswish": + return nn.Hardswish() + elif act == "relu": + return nn.ReLU() + elif act is None: + return Identity() + else: + raise RuntimeError( + "The activation function is not supported: {}".format(act)) + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + filter_size, + stride=1, + groups=1, + act="relu"): + super().__init__() + + self.conv = nn.Conv2D( + in_channels=num_channels, + out_channels=num_filters, + kernel_size=filter_size, + stride=stride, + 
padding=(filter_size - 1) // 2, + groups=groups, + bias_attr=False) + self.bn = nn.BatchNorm(num_filters, act=None) + self.act = _create_act(act) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.act(x) + return x + + +class BottleneckBlock(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + has_se, + stride=1, + downsample=False): + super().__init__() + + self.has_se = has_se + self.downsample = downsample + + self.conv1 = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters, + filter_size=1, + act="relu") + self.conv2 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters, + filter_size=3, + stride=stride, + act="relu") + self.conv3 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters * 4, + filter_size=1, + act=None) + + if self.downsample: + self.conv_down = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters * 4, + filter_size=1, + act=None) + + if self.has_se: + self.se = SELayer( + num_channels=num_filters * 4, + num_filters=num_filters * 4, + reduction_ratio=16) + self.relu = nn.ReLU() + + def forward(self, x, res_dict=None): + residual = x + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + if self.downsample: + residual = self.conv_down(residual) + if self.has_se: + x = self.se(x) + x = paddle.add(x=residual, y=x) + x = self.relu(x) + return x + + +class BasicBlock(nn.Layer): + def __init__(self, num_channels, num_filters, has_se=False): + super().__init__() + + self.has_se = has_se + + self.conv1 = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters, + filter_size=3, + stride=1, + act="relu") + self.conv2 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters, + filter_size=3, + stride=1, + act=None) + + if self.has_se: + self.se = SELayer( + num_channels=num_filters, + num_filters=num_filters, + reduction_ratio=16) + self.relu = nn.ReLU() + + def forward(self, x): + residual = x + x = self.conv1(x) + x = 
self.conv2(x) + + if self.has_se: + x = self.se(x) + + x = paddle.add(x=residual, y=x) + x = self.relu(x) + return x + + +class SELayer(TheseusLayer): + def __init__(self, num_channels, num_filters, reduction_ratio): + super().__init__() + + self.avg_pool = nn.AdaptiveAvgPool2D(1) + + self._num_channels = num_channels + + med_ch = int(num_channels / reduction_ratio) + stdv = 1.0 / math.sqrt(num_channels * 1.0) + self.fc_squeeze = nn.Linear( + num_channels, + med_ch, + weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv))) + self.relu = nn.ReLU() + stdv = 1.0 / math.sqrt(med_ch * 1.0) + self.fc_excitation = nn.Linear( + med_ch, + num_filters, + weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv))) + self.sigmoid = nn.Sigmoid() + + def forward(self, x, res_dict=None): + residual = x + x = self.avg_pool(x) + x = paddle.squeeze(x, axis=[2, 3]) + x = self.fc_squeeze(x) + x = self.relu(x) + x = self.fc_excitation(x) + x = self.sigmoid(x) + x = paddle.unsqueeze(x, axis=[2, 3]) + x = residual * x + return x + + +class Stage(TheseusLayer): + def __init__(self, num_modules, num_filters, has_se=False): + super().__init__() + + self._num_modules = num_modules + + self.stage_func_list = nn.LayerList() + for i in range(num_modules): + self.stage_func_list.append( + HighResolutionModule( + num_filters=num_filters, has_se=has_se)) + + def forward(self, x, res_dict=None): + x = x + for idx in range(self._num_modules): + x = self.stage_func_list[idx](x) + return x + + +class HighResolutionModule(TheseusLayer): + def __init__(self, num_filters, has_se=False): + super().__init__() + + self.basic_block_list = nn.LayerList() + + for i in range(len(num_filters)): + self.basic_block_list.append( + nn.Sequential(* [ + BasicBlock( + num_channels=num_filters[i], + num_filters=num_filters[i], + has_se=has_se) for j in range(4) + ])) + + self.fuse_func = FuseLayers( + in_channels=num_filters, out_channels=num_filters) + + def forward(self, x, res_dict=None): + out = [] + for idx, xi in 
enumerate(x): + basic_block_list = self.basic_block_list[idx] + for basic_block_func in basic_block_list: + xi = basic_block_func(xi) + out.append(xi) + out = self.fuse_func(out) + return out + + +class FuseLayers(TheseusLayer): + def __init__(self, in_channels, out_channels): + super().__init__() + + self._actual_ch = len(in_channels) + self._in_channels = in_channels + + self.residual_func_list = nn.LayerList() + self.relu = nn.ReLU() + for i in range(len(in_channels)): + for j in range(len(in_channels)): + if j > i: + self.residual_func_list.append( + ConvBNLayer( + num_channels=in_channels[j], + num_filters=out_channels[i], + filter_size=1, + stride=1, + act=None)) + elif j < i: + pre_num_filters = in_channels[j] + for k in range(i - j): + if k == i - j - 1: + self.residual_func_list.append( + ConvBNLayer( + num_channels=pre_num_filters, + num_filters=out_channels[i], + filter_size=3, + stride=2, + act=None)) + pre_num_filters = out_channels[i] + else: + self.residual_func_list.append( + ConvBNLayer( + num_channels=pre_num_filters, + num_filters=out_channels[j], + filter_size=3, + stride=2, + act="relu")) + pre_num_filters = out_channels[j] + + def forward(self, x, res_dict=None): + out = [] + residual_func_idx = 0 + for i in range(len(self._in_channels)): + residual = x[i] + for j in range(len(self._in_channels)): + if j > i: + xj = self.residual_func_list[residual_func_idx](x[j]) + residual_func_idx += 1 + + xj = upsample(xj, scale_factor=2**(j - i), mode="nearest") + residual = paddle.add(x=residual, y=xj) + elif j < i: + xj = x[j] + for k in range(i - j): + xj = self.residual_func_list[residual_func_idx](xj) + residual_func_idx += 1 + + residual = paddle.add(x=residual, y=xj) + + residual = self.relu(residual) + out.append(residual) + + return out + + +class LastClsOut(TheseusLayer): + def __init__(self, + num_channel_list, + has_se, + num_filters_list=[32, 64, 128, 256]): + super().__init__() + + self.func_list = nn.LayerList() + for idx in 
range(len(num_channel_list)): + self.func_list.append( + BottleneckBlock( + num_channels=num_channel_list[idx], + num_filters=num_filters_list[idx], + has_se=has_se, + downsample=True)) + + def forward(self, x, res_dict=None): + out = [] + for idx, xi in enumerate(x): + xi = self.func_list[idx](xi) + out.append(xi) + return out + + +class HRNet(TheseusLayer): + """ + HRNet + Args: + width: int=18. Base channel number of HRNet. + has_se: bool=False. If 'True', add se module to HRNet. + class_num: int=1000. Output num of last fc layer. + Returns: + model: nn.Layer. Specific HRNet model depends on args. + """ + + def __init__(self, + stages_pattern, + width=18, + has_se=False, + class_num=1000, + return_patterns=None, + return_stages=None): + super().__init__() + + self.width = width + self.has_se = has_se + self._class_num = class_num + + channels_2 = [self.width, self.width * 2] + channels_3 = [self.width, self.width * 2, self.width * 4] + channels_4 = [ + self.width, self.width * 2, self.width * 4, self.width * 8 + ] + + self.conv_layer1_1 = ConvBNLayer( + num_channels=3, + num_filters=64, + filter_size=3, + stride=2, + act="relu") + + self.conv_layer1_2 = ConvBNLayer( + num_channels=64, + num_filters=64, + filter_size=3, + stride=2, + act="relu") + + self.layer1 = nn.Sequential(* [ + BottleneckBlock( + num_channels=64 if i == 0 else 256, + num_filters=64, + has_se=has_se, + stride=1, + downsample=True if i == 0 else False) for i in range(4) + ]) + + self.conv_tr1_1 = ConvBNLayer( + num_channels=256, num_filters=width, filter_size=3) + self.conv_tr1_2 = ConvBNLayer( + num_channels=256, num_filters=width * 2, filter_size=3, stride=2) + + self.st2 = Stage( + num_modules=1, num_filters=channels_2, has_se=self.has_se) + + self.conv_tr2 = ConvBNLayer( + num_channels=width * 2, + num_filters=width * 4, + filter_size=3, + stride=2) + self.st3 = Stage( + num_modules=4, num_filters=channels_3, has_se=self.has_se) + + self.conv_tr3 = ConvBNLayer( + num_channels=width * 4, + 
num_filters=width * 8, + filter_size=3, + stride=2) + + self.st4 = Stage( + num_modules=3, num_filters=channels_4, has_se=self.has_se) + + # classification + num_filters_list = [32, 64, 128, 256] + self.last_cls = LastClsOut( + num_channel_list=channels_4, + has_se=self.has_se, + num_filters_list=num_filters_list) + + last_num_filters = [256, 512, 1024] + self.cls_head_conv_list = nn.LayerList() + for idx in range(3): + self.cls_head_conv_list.append( + ConvBNLayer( + num_channels=num_filters_list[idx] * 4, + num_filters=last_num_filters[idx], + filter_size=3, + stride=2)) + + self.conv_last = ConvBNLayer( + num_channels=1024, num_filters=2048, filter_size=1, stride=1) + + self.avg_pool = nn.AdaptiveAvgPool2D(1) + + stdv = 1.0 / math.sqrt(2048 * 1.0) + + self.fc = nn.Linear( + 2048, + class_num, + weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv))) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + x = self.conv_layer1_1(x) + x = self.conv_layer1_2(x) + + x = self.layer1(x) + + tr1_1 = self.conv_tr1_1(x) + tr1_2 = self.conv_tr1_2(x) + x = self.st2([tr1_1, tr1_2]) + + tr2 = self.conv_tr2(x[-1]) + x.append(tr2) + x = self.st3(x) + + tr3 = self.conv_tr3(x[-1]) + x.append(tr3) + x = self.st4(x) + + x = self.last_cls(x) + + y = x[0] + for idx in range(3): + y = paddle.add(x[idx + 1], self.cls_head_conv_list[idx](y)) + + y = self.conv_last(y) + y = self.avg_pool(y) + y = paddle.reshape(y, shape=[-1, y.shape[1]]) + y = self.fc(y) + return y + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." 
+ ) + + +def HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W18_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W18_C` model depends on args. + """ + model = HRNet( + width=18, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W18_C"], use_ssld) + return model + + +def HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W30_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W30_C` model depends on args. + """ + model = HRNet( + width=30, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W30_C"], use_ssld) + return model + + +def HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W32_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W32_C` model depends on args. + """ + model = HRNet( + width=32, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W32_C"], use_ssld) + return model + + +def HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W40_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. 
+ use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W40_C` model depends on args. + """ + model = HRNet( + width=40, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W40_C"], use_ssld) + return model + + +def HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W44_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W44_C` model depends on args. + """ + model = HRNet( + width=44, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W44_C"], use_ssld) + return model + + +def HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W48_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W48_C` model depends on args. + """ + model = HRNet( + width=48, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W48_C"], use_ssld) + return model + + +def HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W60_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W60_C` model depends on args. 
+ """ + model = HRNet( + width=60, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W60_C"], use_ssld) + return model + + +def HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs): + """ + HRNet_W64_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `HRNet_W64_C` model depends on args. + """ + model = HRNet( + width=64, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W64_C"], use_ssld) + return model + + +def SE_HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W18_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W18_C` model depends on args. + """ + model = HRNet( + width=18, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W18_C"], use_ssld) + return model + + +def SE_HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W30_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W30_C` model depends on args. 
+ """ + model = HRNet( + width=30, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W30_C"], use_ssld) + return model + + +def SE_HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W32_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W32_C` model depends on args. + """ + model = HRNet( + width=32, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W32_C"], use_ssld) + return model + + +def SE_HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W40_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W40_C` model depends on args. + """ + model = HRNet( + width=40, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W40_C"], use_ssld) + return model + + +def SE_HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W44_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W44_C` model depends on args. 
+ """ + model = HRNet( + width=44, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W44_C"], use_ssld) + return model + + +def SE_HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W48_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W48_C` model depends on args. + """ + model = HRNet( + width=48, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W48_C"], use_ssld) + return model + + +def SE_HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W60_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W60_C` model depends on args. + """ + model = HRNet( + width=60, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W60_C"], use_ssld) + return model + + +def SE_HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs): + """ + SE_HRNet_W64_C + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `SE_HRNet_W64_C` model depends on args. 
+ """ + model = HRNet( + width=64, + stages_pattern=MODEL_STAGES_PATTERN["HRNet"], + has_se=True, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W64_C"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/inception_v3.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/inception_v3.py new file mode 100644 index 0000000..5575f8c --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/inception_v3.py @@ -0,0 +1,557 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function +import math +import paddle +from paddle import ParamAttr +import paddle.nn as nn +from paddle.nn import Conv2D, BatchNorm, Linear, Dropout +from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D +from paddle.nn.initializer import Uniform + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "InceptionV3": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/InceptionV3_pretrained.pdparams" +} + +MODEL_STAGES_PATTERN = { + "InceptionV3": [ + "inception_block_list[2]", "inception_block_list[3]", + "inception_block_list[7]", "inception_block_list[8]", + "inception_block_list[10]" + ] +} + +__all__ = MODEL_URLS.keys() +''' +InceptionV3 config: dict. 
+ key: inception blocks of InceptionV3. + values: conv num in different blocks. +''' +NET_CONFIG = { + "inception_a": [[192, 256, 288], [32, 64, 64]], + "inception_b": [288], + "inception_c": [[768, 768, 768, 768], [128, 160, 160, 192]], + "inception_d": [768], + "inception_e": [1280, 2048] +} + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + filter_size, + stride=1, + padding=0, + groups=1, + act="relu"): + super().__init__() + self.act = act + self.conv = Conv2D( + in_channels=num_channels, + out_channels=num_filters, + kernel_size=filter_size, + stride=stride, + padding=padding, + groups=groups, + bias_attr=False) + self.bn = BatchNorm(num_filters) + self.relu = nn.ReLU() + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + if self.act: + x = self.relu(x) + return x + + +class InceptionStem(TheseusLayer): + def __init__(self): + super().__init__() + self.conv_1a_3x3 = ConvBNLayer( + num_channels=3, + num_filters=32, + filter_size=3, + stride=2, + act="relu") + self.conv_2a_3x3 = ConvBNLayer( + num_channels=32, + num_filters=32, + filter_size=3, + stride=1, + act="relu") + self.conv_2b_3x3 = ConvBNLayer( + num_channels=32, + num_filters=64, + filter_size=3, + padding=1, + act="relu") + + self.max_pool = MaxPool2D(kernel_size=3, stride=2, padding=0) + self.conv_3b_1x1 = ConvBNLayer( + num_channels=64, num_filters=80, filter_size=1, act="relu") + self.conv_4a_3x3 = ConvBNLayer( + num_channels=80, num_filters=192, filter_size=3, act="relu") + + def forward(self, x): + x = self.conv_1a_3x3(x) + x = self.conv_2a_3x3(x) + x = self.conv_2b_3x3(x) + x = self.max_pool(x) + x = self.conv_3b_1x1(x) + x = self.conv_4a_3x3(x) + x = self.max_pool(x) + return x + + +class InceptionA(TheseusLayer): + def __init__(self, num_channels, pool_features): + super().__init__() + self.branch1x1 = ConvBNLayer( + num_channels=num_channels, + num_filters=64, + filter_size=1, + act="relu") + self.branch5x5_1 = ConvBNLayer( + 
num_channels=num_channels, + num_filters=48, + filter_size=1, + act="relu") + self.branch5x5_2 = ConvBNLayer( + num_channels=48, + num_filters=64, + filter_size=5, + padding=2, + act="relu") + + self.branch3x3dbl_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=64, + filter_size=1, + act="relu") + self.branch3x3dbl_2 = ConvBNLayer( + num_channels=64, + num_filters=96, + filter_size=3, + padding=1, + act="relu") + self.branch3x3dbl_3 = ConvBNLayer( + num_channels=96, + num_filters=96, + filter_size=3, + padding=1, + act="relu") + self.branch_pool = AvgPool2D( + kernel_size=3, stride=1, padding=1, exclusive=False) + self.branch_pool_conv = ConvBNLayer( + num_channels=num_channels, + num_filters=pool_features, + filter_size=1, + act="relu") + + def forward(self, x): + branch1x1 = self.branch1x1(x) + branch5x5 = self.branch5x5_1(x) + branch5x5 = self.branch5x5_2(branch5x5) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl) + + branch_pool = self.branch_pool(x) + branch_pool = self.branch_pool_conv(branch_pool) + x = paddle.concat( + [branch1x1, branch5x5, branch3x3dbl, branch_pool], axis=1) + return x + + +class InceptionB(TheseusLayer): + def __init__(self, num_channels): + super().__init__() + self.branch3x3 = ConvBNLayer( + num_channels=num_channels, + num_filters=384, + filter_size=3, + stride=2, + act="relu") + self.branch3x3dbl_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=64, + filter_size=1, + act="relu") + self.branch3x3dbl_2 = ConvBNLayer( + num_channels=64, + num_filters=96, + filter_size=3, + padding=1, + act="relu") + self.branch3x3dbl_3 = ConvBNLayer( + num_channels=96, + num_filters=96, + filter_size=3, + stride=2, + act="relu") + self.branch_pool = MaxPool2D(kernel_size=3, stride=2) + + def forward(self, x): + branch3x3 = self.branch3x3(x) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + 
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl) + + branch_pool = self.branch_pool(x) + + x = paddle.concat([branch3x3, branch3x3dbl, branch_pool], axis=1) + + return x + + +class InceptionC(TheseusLayer): + def __init__(self, num_channels, channels_7x7): + super().__init__() + self.branch1x1 = ConvBNLayer( + num_channels=num_channels, + num_filters=192, + filter_size=1, + act="relu") + + self.branch7x7_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=channels_7x7, + filter_size=1, + stride=1, + act="relu") + self.branch7x7_2 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=channels_7x7, + filter_size=(1, 7), + stride=1, + padding=(0, 3), + act="relu") + self.branch7x7_3 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=192, + filter_size=(7, 1), + stride=1, + padding=(3, 0), + act="relu") + + self.branch7x7dbl_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=channels_7x7, + filter_size=1, + act="relu") + self.branch7x7dbl_2 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=channels_7x7, + filter_size=(7, 1), + padding=(3, 0), + act="relu") + self.branch7x7dbl_3 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=channels_7x7, + filter_size=(1, 7), + padding=(0, 3), + act="relu") + self.branch7x7dbl_4 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=channels_7x7, + filter_size=(7, 1), + padding=(3, 0), + act="relu") + self.branch7x7dbl_5 = ConvBNLayer( + num_channels=channels_7x7, + num_filters=192, + filter_size=(1, 7), + padding=(0, 3), + act="relu") + + self.branch_pool = AvgPool2D( + kernel_size=3, stride=1, padding=1, exclusive=False) + self.branch_pool_conv = ConvBNLayer( + num_channels=num_channels, + num_filters=192, + filter_size=1, + act="relu") + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch7x7 = self.branch7x7_1(x) + branch7x7 = self.branch7x7_2(branch7x7) + branch7x7 = self.branch7x7_3(branch7x7) + + branch7x7dbl = self.branch7x7dbl_1(x) + branch7x7dbl = 
self.branch7x7dbl_2(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl) + + branch_pool = self.branch_pool(x) + branch_pool = self.branch_pool_conv(branch_pool) + + x = paddle.concat( + [branch1x1, branch7x7, branch7x7dbl, branch_pool], axis=1) + + return x + + +class InceptionD(TheseusLayer): + def __init__(self, num_channels): + super().__init__() + self.branch3x3_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=192, + filter_size=1, + act="relu") + self.branch3x3_2 = ConvBNLayer( + num_channels=192, + num_filters=320, + filter_size=3, + stride=2, + act="relu") + self.branch7x7x3_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=192, + filter_size=1, + act="relu") + self.branch7x7x3_2 = ConvBNLayer( + num_channels=192, + num_filters=192, + filter_size=(1, 7), + padding=(0, 3), + act="relu") + self.branch7x7x3_3 = ConvBNLayer( + num_channels=192, + num_filters=192, + filter_size=(7, 1), + padding=(3, 0), + act="relu") + self.branch7x7x3_4 = ConvBNLayer( + num_channels=192, + num_filters=192, + filter_size=3, + stride=2, + act="relu") + self.branch_pool = MaxPool2D(kernel_size=3, stride=2) + + def forward(self, x): + branch3x3 = self.branch3x3_1(x) + branch3x3 = self.branch3x3_2(branch3x3) + + branch7x7x3 = self.branch7x7x3_1(x) + branch7x7x3 = self.branch7x7x3_2(branch7x7x3) + branch7x7x3 = self.branch7x7x3_3(branch7x7x3) + branch7x7x3 = self.branch7x7x3_4(branch7x7x3) + + branch_pool = self.branch_pool(x) + + x = paddle.concat([branch3x3, branch7x7x3, branch_pool], axis=1) + return x + + +class InceptionE(TheseusLayer): + def __init__(self, num_channels): + super().__init__() + self.branch1x1 = ConvBNLayer( + num_channels=num_channels, + num_filters=320, + filter_size=1, + act="relu") + self.branch3x3_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=384, + filter_size=1, + act="relu") + self.branch3x3_2a = ConvBNLayer( 
+ num_channels=384, + num_filters=384, + filter_size=(1, 3), + padding=(0, 1), + act="relu") + self.branch3x3_2b = ConvBNLayer( + num_channels=384, + num_filters=384, + filter_size=(3, 1), + padding=(1, 0), + act="relu") + + self.branch3x3dbl_1 = ConvBNLayer( + num_channels=num_channels, + num_filters=448, + filter_size=1, + act="relu") + self.branch3x3dbl_2 = ConvBNLayer( + num_channels=448, + num_filters=384, + filter_size=3, + padding=1, + act="relu") + self.branch3x3dbl_3a = ConvBNLayer( + num_channels=384, + num_filters=384, + filter_size=(1, 3), + padding=(0, 1), + act="relu") + self.branch3x3dbl_3b = ConvBNLayer( + num_channels=384, + num_filters=384, + filter_size=(3, 1), + padding=(1, 0), + act="relu") + self.branch_pool = AvgPool2D( + kernel_size=3, stride=1, padding=1, exclusive=False) + self.branch_pool_conv = ConvBNLayer( + num_channels=num_channels, + num_filters=192, + filter_size=1, + act="relu") + + def forward(self, x): + branch1x1 = self.branch1x1(x) + + branch3x3 = self.branch3x3_1(x) + branch3x3 = [ + self.branch3x3_2a(branch3x3), + self.branch3x3_2b(branch3x3), + ] + branch3x3 = paddle.concat(branch3x3, axis=1) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = [ + self.branch3x3dbl_3a(branch3x3dbl), + self.branch3x3dbl_3b(branch3x3dbl), + ] + branch3x3dbl = paddle.concat(branch3x3dbl, axis=1) + + branch_pool = self.branch_pool(x) + branch_pool = self.branch_pool_conv(branch_pool) + + x = paddle.concat( + [branch1x1, branch3x3, branch3x3dbl, branch_pool], axis=1) + return x + + +class Inception_V3(TheseusLayer): + """ + Inception_V3 + Args: + config: dict. config of Inception_V3. + class_num: int=1000. The number of classes. + pretrained: (True or False) or path of pretrained_model. Whether to load the pretrained model. + Returns: + model: nn.Layer. Specific Inception_V3 model depends on args. 
+ """ + + def __init__(self, + config, + stages_pattern, + class_num=1000, + return_patterns=None, + return_stages=None): + super().__init__() + + self.inception_a_list = config["inception_a"] + self.inception_c_list = config["inception_c"] + self.inception_b_list = config["inception_b"] + self.inception_d_list = config["inception_d"] + self.inception_e_list = config["inception_e"] + + self.inception_stem = InceptionStem() + + self.inception_block_list = nn.LayerList() + for i in range(len(self.inception_a_list[0])): + inception_a = InceptionA(self.inception_a_list[0][i], + self.inception_a_list[1][i]) + self.inception_block_list.append(inception_a) + + for i in range(len(self.inception_b_list)): + inception_b = InceptionB(self.inception_b_list[i]) + self.inception_block_list.append(inception_b) + + for i in range(len(self.inception_c_list[0])): + inception_c = InceptionC(self.inception_c_list[0][i], + self.inception_c_list[1][i]) + self.inception_block_list.append(inception_c) + + for i in range(len(self.inception_d_list)): + inception_d = InceptionD(self.inception_d_list[i]) + self.inception_block_list.append(inception_d) + + for i in range(len(self.inception_e_list)): + inception_e = InceptionE(self.inception_e_list[i]) + self.inception_block_list.append(inception_e) + + self.avg_pool = AdaptiveAvgPool2D(1) + self.dropout = Dropout(p=0.2, mode="downscale_in_infer") + stdv = 1.0 / math.sqrt(2048 * 1.0) + self.fc = Linear( + 2048, + class_num, + weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)), + bias_attr=ParamAttr()) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + x = self.inception_stem(x) + for inception_block in self.inception_block_list: + x = inception_block(x) + x = self.avg_pool(x) + x = paddle.reshape(x, shape=[-1, 2048]) + x = self.dropout(x) + x = self.fc(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: 
+ pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def InceptionV3(pretrained=False, use_ssld=False, **kwargs): + """ + InceptionV3 + Args: + pretrained: bool=false or str. if `true` load pretrained parameters, `false` otherwise. + if str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `InceptionV3` model + """ + model = Inception_V3( + NET_CONFIG, + stages_pattern=MODEL_STAGES_PATTERN["InceptionV3"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["InceptionV3"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v1.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v1.py new file mode 100644 index 0000000..9767d69 --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v1.py @@ -0,0 +1,257 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from paddle import ParamAttr +import paddle.nn as nn +from paddle.nn import Conv2D, BatchNorm, Linear, ReLU, Flatten +from paddle.nn import AdaptiveAvgPool2D +from paddle.nn.initializer import KaimingNormal + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "MobileNetV1_x0_25": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_25_pretrained.pdparams", + "MobileNetV1_x0_5": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_5_pretrained.pdparams", + "MobileNetV1_x0_75": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_75_pretrained.pdparams", + "MobileNetV1": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_pretrained.pdparams" +} + +MODEL_STAGES_PATTERN = { + "MobileNetV1": ["blocks[0]", "blocks[2]", "blocks[4]", "blocks[10]"] +} + +__all__ = MODEL_URLS.keys() + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + num_channels, + filter_size, + num_filters, + stride, + padding, + num_groups=1): + super().__init__() + + self.conv = Conv2D( + in_channels=num_channels, + out_channels=num_filters, + kernel_size=filter_size, + stride=stride, + padding=padding, + groups=num_groups, + weight_attr=ParamAttr(initializer=KaimingNormal()), + bias_attr=False) + self.bn = BatchNorm(num_filters) + self.relu = ReLU() + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.relu(x) + return x + + +class DepthwiseSeparable(TheseusLayer): + def __init__(self, num_channels, num_filters1, num_filters2, num_groups, + stride, scale): + super().__init__() + + self.depthwise_conv = ConvBNLayer( + num_channels=num_channels, + num_filters=int(num_filters1 * scale), + filter_size=3, + stride=stride, + 
padding=1, + num_groups=int(num_groups * scale)) + + self.pointwise_conv = ConvBNLayer( + num_channels=int(num_filters1 * scale), + filter_size=1, + num_filters=int(num_filters2 * scale), + stride=1, + padding=0) + + def forward(self, x): + x = self.depthwise_conv(x) + x = self.pointwise_conv(x) + return x + + +class MobileNet(TheseusLayer): + """ + MobileNet + Args: + scale: float=1.0. The coefficient that controls the size of network parameters. + class_num: int=1000. The number of classes. + Returns: + model: nn.Layer. Specific MobileNet model depends on args. + """ + + def __init__(self, + stages_pattern, + scale=1.0, + class_num=1000, + return_patterns=None, + return_stages=None): + super().__init__() + self.scale = scale + + self.conv = ConvBNLayer( + num_channels=3, + filter_size=3, + num_filters=int(32 * scale), + stride=2, + padding=1) + + #num_channels, num_filters1, num_filters2, num_groups, stride + self.cfg = [[int(32 * scale), 32, 64, 32, 1], + [int(64 * scale), 64, 128, 64, 2], + [int(128 * scale), 128, 128, 128, 1], + [int(128 * scale), 128, 256, 128, 2], + [int(256 * scale), 256, 256, 256, 1], + [int(256 * scale), 256, 512, 256, 2], + [int(512 * scale), 512, 512, 512, 1], + [int(512 * scale), 512, 512, 512, 1], + [int(512 * scale), 512, 512, 512, 1], + [int(512 * scale), 512, 512, 512, 1], + [int(512 * scale), 512, 512, 512, 1], + [int(512 * scale), 512, 1024, 512, 2], + [int(1024 * scale), 1024, 1024, 1024, 1]] + + self.blocks = nn.Sequential(* [ + DepthwiseSeparable( + num_channels=params[0], + num_filters1=params[1], + num_filters2=params[2], + num_groups=params[3], + stride=params[4], + scale=scale) for params in self.cfg + ]) + + self.avg_pool = AdaptiveAvgPool2D(1) + self.flatten = Flatten(start_axis=1, stop_axis=-1) + + self.fc = Linear( + int(1024 * scale), + class_num, + weight_attr=ParamAttr(initializer=KaimingNormal())) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def 
forward(self, x): + x = self.conv(x) + x = self.blocks(x) + x = self.avg_pool(x) + x = self.flatten(x) + x = self.fc(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def MobileNetV1_x0_25(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV1_x0_25 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV1_x0_25` model depends on args. + """ + model = MobileNet( + scale=0.25, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_25"], + use_ssld) + return model + + +def MobileNetV1_x0_5(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV1_x0_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV1_x0_5` model depends on args. + """ + model = MobileNet( + scale=0.5, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_5"], + use_ssld) + return model + + +def MobileNetV1_x0_75(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV1_x0_75 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. 
+ If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV1_x0_75` model depends on args. + """ + model = MobileNet( + scale=0.75, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_75"], + use_ssld) + return model + + +def MobileNetV1(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV1 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV1` model depends on args. + """ + model = MobileNet( + scale=1.0, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v3.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v3.py new file mode 100644 index 0000000..836c54c --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/mobilenet_v3.py @@ -0,0 +1,586 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import, division, print_function
+
+import paddle
+import paddle.nn as nn
+from paddle import ParamAttr
+from paddle.nn import AdaptiveAvgPool2D, BatchNorm, Conv2D, Dropout, Linear
+from paddle.regularizer import L2Decay
+from ppcls.arch.backbone.base.theseus_layer import TheseusLayer
+from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url
+
+MODEL_URLS = {
+    "MobileNetV3_small_x0_35":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_small_x0_35_pretrained.pdparams",
+    "MobileNetV3_small_x0_5":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_small_x0_5_pretrained.pdparams",
+    "MobileNetV3_small_x0_75":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_small_x0_75_pretrained.pdparams",
+    "MobileNetV3_small_x1_0":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_small_x1_0_pretrained.pdparams",
+    "MobileNetV3_small_x1_25":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_small_x1_25_pretrained.pdparams",
+    "MobileNetV3_large_x0_35":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x0_35_pretrained.pdparams",
+    "MobileNetV3_large_x0_5":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x0_5_pretrained.pdparams",
+    "MobileNetV3_large_x0_75":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x0_75_pretrained.pdparams",
+    "MobileNetV3_large_x1_0":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x1_0_pretrained.pdparams",
+    "MobileNetV3_large_x1_25":
+    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x1_25_pretrained.pdparams",
+}
+
+MODEL_STAGES_PATTERN = {
+    "MobileNetV3_small":
+    ["blocks[0]", "blocks[2]", "blocks[7]", "blocks[10]"],
+    "MobileNetV3_large":
+    ["blocks[0]", "blocks[2]", "blocks[5]", "blocks[11]", "blocks[14]"]
+}
+
+__all__ = MODEL_URLS.keys()
+
+# "large" and "small" correspond to MobileNetV3_large and MobileNetV3_small respectively.
+# Each config value is a list; every element (itself a list) describes one depthwise block as [k, exp, c, se, act, s]:
+# k: kernel_size
+# exp: middle channel number in depthwise block
+# c: output channel number in depthwise block
+# se: whether to use SE block
+# act: which activation to use
+# s: stride in depthwise block
+NET_CONFIG = {
+    "large": [
+        # k, exp, c, se, act, s
+        [3, 16, 16, False, "relu", 1],
+        [3, 64, 24, False, "relu", 2],
+        [3, 72, 24, False, "relu", 1],
+        [5, 72, 40, True, "relu", 2],
+        [5, 120, 40, True, "relu", 1],
+        [5, 120, 40, True, "relu", 1],
+        [3, 240, 80, False, "hardswish", 2],
+        [3, 200, 80, False, "hardswish", 1],
+        [3, 184, 80, False, "hardswish", 1],
+        [3, 184, 80, False, "hardswish", 1],
+        [3, 480, 112, True, "hardswish", 1],
+        [3, 672, 112, True, "hardswish", 1],
+        [5, 672, 160, True, "hardswish", 2],
+        [5, 960, 160, True, "hardswish", 1],
+        [5, 960, 160, True, "hardswish", 1],
+    ],
+    "small": [
+        # k, exp, c, se, act, s
+        [3, 16, 16, True, "relu", 2],
+        [3, 72, 24, False, "relu", 2],
+        [3, 88, 24, False, "relu", 1],
+        [5, 96, 40, True, "hardswish", 2],
+        [5, 240, 40, True, "hardswish", 1],
+        [5, 240, 40, True, "hardswish", 1],
+        [5, 120, 48, True, "hardswish", 1],
+        [5, 144, 48, True, "hardswish", 1],
+        [5, 288, 96, True, "hardswish", 2],
+        [5, 576, 96, True, "hardswish", 1],
+        [5, 576, 96, True, "hardswish", 1],
+    ]
+}
+# output channel number of the first (stem) conv in MobileNetV3
+STEM_CONV_NUMBER = 16
+# second-to-last conv output channel number for "small"
+LAST_SECOND_CONV_SMALL = 576
+# second-to-last conv output channel number for "large"
+LAST_SECOND_CONV_LARGE = 960
+# last conv output channel number for both "large" and "small"
+LAST_CONV = 1280
+ + +def _make_divisible(v, divisor=8, min_value=None): + if min_value is None: + min_value = divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + if new_v < 0.9 * v: + new_v += divisor + return new_v + + +def _create_act(act): + if act == "hardswish": + return nn.Hardswish() + elif act == "relu": + return nn.ReLU() + elif act is None: + return None + else: + raise RuntimeError( + "The activation function is not supported: {}".format(act)) + + +class MobileNetV3(TheseusLayer): + """ + MobileNetV3 + Args: + config: list. MobileNetV3 depthwise blocks config. + scale: float=1.0. The coefficient that controls the size of network parameters. + class_num: int=1000. The number of classes. + inplanes: int=16. The output channel number of first convolution layer. + class_squeeze: int=960. The output channel number of penultimate convolution layer. + class_expand: int=1280. The output channel number of last convolution layer. + dropout_prob: float=0.2. Probability of setting units to zero. + Returns: + model: nn.Layer. Specific MobileNetV3 model depends on args. 
+ """ + + def __init__(self, + config, + stages_pattern, + scale=1.0, + class_num=1000, + inplanes=STEM_CONV_NUMBER, + class_squeeze=LAST_SECOND_CONV_LARGE, + class_expand=LAST_CONV, + dropout_prob=0.2, + return_patterns=None, + return_stages=None): + super().__init__() + + self.cfg = config + self.scale = scale + self.inplanes = inplanes + self.class_squeeze = class_squeeze + self.class_expand = class_expand + self.class_num = class_num + + self.conv = ConvBNLayer( + in_c=3, + out_c=_make_divisible(self.inplanes * self.scale), + filter_size=3, + stride=2, + padding=1, + num_groups=1, + if_act=True, + act="hardswish") + + self.blocks = nn.Sequential(* [ + ResidualUnit( + in_c=_make_divisible(self.inplanes * self.scale if i == 0 else + self.cfg[i - 1][2] * self.scale), + mid_c=_make_divisible(self.scale * exp), + out_c=_make_divisible(self.scale * c), + filter_size=k, + stride=s, + use_se=se, + act=act) for i, (k, exp, c, se, act, s) in enumerate(self.cfg) + ]) + + self.last_second_conv = ConvBNLayer( + in_c=_make_divisible(self.cfg[-1][2] * self.scale), + out_c=_make_divisible(self.scale * self.class_squeeze), + filter_size=1, + stride=1, + padding=0, + num_groups=1, + if_act=True, + act="hardswish") + + self.avg_pool = AdaptiveAvgPool2D(1) + + self.last_conv = Conv2D( + in_channels=_make_divisible(self.scale * self.class_squeeze), + out_channels=self.class_expand, + kernel_size=1, + stride=1, + padding=0, + bias_attr=False) + + self.hardswish = nn.Hardswish() + if dropout_prob is not None: + self.dropout = Dropout(p=dropout_prob, mode="downscale_in_infer") + else: + self.dropout = None + self.flatten = nn.Flatten(start_axis=1, stop_axis=-1) + + self.fc = Linear(self.class_expand, class_num) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + x = self.conv(x) + x = self.blocks(x) + x = self.last_second_conv(x) + x = self.avg_pool(x) + x = self.last_conv(x) + x = self.hardswish(x) + if 
self.dropout is not None: + x = self.dropout(x) + x = self.flatten(x) + x = self.fc(x) + + return x + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + in_c, + out_c, + filter_size, + stride, + padding, + num_groups=1, + if_act=True, + act=None): + super().__init__() + + self.conv = Conv2D( + in_channels=in_c, + out_channels=out_c, + kernel_size=filter_size, + stride=stride, + padding=padding, + groups=num_groups, + bias_attr=False) + self.bn = BatchNorm( + num_channels=out_c, + act=None, + param_attr=ParamAttr(regularizer=L2Decay(0.0)), + bias_attr=ParamAttr(regularizer=L2Decay(0.0))) + self.if_act = if_act + self.act = _create_act(act) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + if self.if_act: + x = self.act(x) + return x + + +class ResidualUnit(TheseusLayer): + def __init__(self, + in_c, + mid_c, + out_c, + filter_size, + stride, + use_se, + act=None): + super().__init__() + self.if_shortcut = stride == 1 and in_c == out_c + self.if_se = use_se + + self.expand_conv = ConvBNLayer( + in_c=in_c, + out_c=mid_c, + filter_size=1, + stride=1, + padding=0, + if_act=True, + act=act) + self.bottleneck_conv = ConvBNLayer( + in_c=mid_c, + out_c=mid_c, + filter_size=filter_size, + stride=stride, + padding=int((filter_size - 1) // 2), + num_groups=mid_c, + if_act=True, + act=act) + if self.if_se: + self.mid_se = SEModule(mid_c) + self.linear_conv = ConvBNLayer( + in_c=mid_c, + out_c=out_c, + filter_size=1, + stride=1, + padding=0, + if_act=False, + act=None) + + def forward(self, x): + identity = x + x = self.expand_conv(x) + x = self.bottleneck_conv(x) + if self.if_se: + x = self.mid_se(x) + x = self.linear_conv(x) + if self.if_shortcut: + x = paddle.add(identity, x) + return x + + +# nn.Hardsigmoid can't transfer "slope" and "offset" in nn.functional.hardsigmoid +class Hardsigmoid(TheseusLayer): + def __init__(self, slope=0.2, offset=0.5): + super().__init__() + self.slope = slope + self.offset = offset + + def forward(self, x): + return 
nn.functional.hardsigmoid( + x, slope=self.slope, offset=self.offset) + + +class SEModule(TheseusLayer): + def __init__(self, channel, reduction=4): + super().__init__() + self.avg_pool = AdaptiveAvgPool2D(1) + self.conv1 = Conv2D( + in_channels=channel, + out_channels=channel // reduction, + kernel_size=1, + stride=1, + padding=0) + self.relu = nn.ReLU() + self.conv2 = Conv2D( + in_channels=channel // reduction, + out_channels=channel, + kernel_size=1, + stride=1, + padding=0) + self.hardsigmoid = Hardsigmoid(slope=0.2, offset=0.5) + + def forward(self, x): + identity = x + x = self.avg_pool(x) + x = self.conv1(x) + x = self.relu(x) + x = self.conv2(x) + x = self.hardsigmoid(x) + return paddle.multiply(x=identity, y=x) + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def MobileNetV3_small_x0_35(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_small_x0_35 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_small_x0_35` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["small"], + scale=0.35, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"], + class_squeeze=LAST_SECOND_CONV_SMALL, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_35"], + use_ssld) + return model + + +def MobileNetV3_small_x0_5(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_small_x0_5 + Args: + pretrained: bool=False or str. 
+                    If str, means the path of the pretrained model.
+        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
+    Returns:
+        model: nn.Layer. Specific `MobileNetV3_small_x0_5` model depends on args.
+    """
+    model = MobileNetV3(
+        config=NET_CONFIG["small"],
+        scale=0.5,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
+        class_squeeze=LAST_SECOND_CONV_SMALL,
+        **kwargs)
+    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_5"],
+                     use_ssld)
+    return model
+
+
+def MobileNetV3_small_x0_75(pretrained=False, use_ssld=False, **kwargs):
+    """
+    MobileNetV3_small_x0_75
+    Args:
+        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
+                    If str, means the path of the pretrained model.
+        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
+    Returns:
+        model: nn.Layer. Specific `MobileNetV3_small_x0_75` model depends on args.
+    """
+    model = MobileNetV3(
+        config=NET_CONFIG["small"],
+        scale=0.75,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
+        class_squeeze=LAST_SECOND_CONV_SMALL,
+        **kwargs)
+    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_75"],
+                     use_ssld)
+    return model
+
+
+def MobileNetV3_small_x1_0(pretrained=False, use_ssld=False, **kwargs):
+    """
+    MobileNetV3_small_x1_0
+    Args:
+        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
+                    If str, means the path of the pretrained model.
+        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
+    Returns:
+        model: nn.Layer. Specific `MobileNetV3_small_x1_0` model depends on args.
+ """ + model = MobileNetV3( + config=NET_CONFIG["small"], + scale=1.0, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"], + class_squeeze=LAST_SECOND_CONV_SMALL, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x1_0"], + use_ssld) + return model + + +def MobileNetV3_small_x1_25(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_small_x1_25 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_small_x1_25` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["small"], + scale=1.25, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"], + class_squeeze=LAST_SECOND_CONV_SMALL, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x1_25"], + use_ssld) + return model + + +def MobileNetV3_large_x0_35(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_large_x0_35 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_large_x0_35` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["large"], + scale=0.35, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"], + class_squeeze=LAST_SECOND_CONV_LARGE, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_35"], + use_ssld) + return model + + +def MobileNetV3_large_x0_5(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_large_x0_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. 
+ use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_large_x0_5` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["large"], + scale=0.5, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"], + class_squeeze=LAST_SECOND_CONV_LARGE, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_5"], + use_ssld) + return model + + +def MobileNetV3_large_x0_75(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_large_x0_75 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_large_x0_75` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["large"], + scale=0.75, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"], + class_squeeze=LAST_SECOND_CONV_LARGE, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_75"], + use_ssld) + return model + + +def MobileNetV3_large_x1_0(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_large_x1_0 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_large_x1_0` model depends on args. 
+ """ + model = MobileNetV3( + config=NET_CONFIG["large"], + scale=1.0, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"], + class_squeeze=LAST_SECOND_CONV_LARGE, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x1_0"], + use_ssld) + return model + + +def MobileNetV3_large_x1_25(pretrained=False, use_ssld=False, **kwargs): + """ + MobileNetV3_large_x1_25 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `MobileNetV3_large_x1_25` model depends on args. + """ + model = MobileNetV3( + config=NET_CONFIG["large"], + scale=1.25, + stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"], + class_squeeze=LAST_SECOND_CONV_LARGE, + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x1_25"], + use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/pp_lcnet.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/pp_lcnet.py new file mode 100644 index 0000000..4017462 --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/pp_lcnet.py @@ -0,0 +1,419 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import paddle +import paddle.nn as nn +from paddle import ParamAttr +from paddle.nn import AdaptiveAvgPool2D, BatchNorm, Conv2D, Dropout, Linear +from paddle.regularizer import L2Decay +from paddle.nn.initializer import KaimingNormal +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "PPLCNet_x0_25": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_25_pretrained.pdparams", + "PPLCNet_x0_35": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_35_pretrained.pdparams", + "PPLCNet_x0_5": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_5_pretrained.pdparams", + "PPLCNet_x0_75": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_75_pretrained.pdparams", + "PPLCNet_x1_0": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x1_0_pretrained.pdparams", + "PPLCNet_x1_5": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x1_5_pretrained.pdparams", + "PPLCNet_x2_0": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x2_0_pretrained.pdparams", + "PPLCNet_x2_5": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x2_5_pretrained.pdparams" +} + +MODEL_STAGES_PATTERN = { + "PPLCNet": ["blocks2", "blocks3", "blocks4", "blocks5", "blocks6"] +} + +__all__ = list(MODEL_URLS.keys()) + +# Each element(list) represents a depthwise block, which is composed of k, in_c, out_c, s, use_se. 
+# k: kernel_size +# in_c: input channel number in depthwise block +# out_c: output channel number in depthwise block +# s: stride in depthwise block +# use_se: whether to use SE block + +NET_CONFIG = { + "blocks2": + #k, in_c, out_c, s, use_se + [[3, 16, 32, 1, False]], + "blocks3": [[3, 32, 64, 2, False], [3, 64, 64, 1, False]], + "blocks4": [[3, 64, 128, 2, False], [3, 128, 128, 1, False]], + "blocks5": [[3, 128, 256, 2, False], [5, 256, 256, 1, False], + [5, 256, 256, 1, False], [5, 256, 256, 1, False], + [5, 256, 256, 1, False], [5, 256, 256, 1, False]], + "blocks6": [[5, 256, 512, 2, True], [5, 512, 512, 1, True]] +} + + +def make_divisible(v, divisor=8, min_value=None): + if min_value is None: + min_value = divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + if new_v < 0.9 * v: + new_v += divisor + return new_v + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + num_channels, + filter_size, + num_filters, + stride, + num_groups=1): + super().__init__() + + self.conv = Conv2D( + in_channels=num_channels, + out_channels=num_filters, + kernel_size=filter_size, + stride=stride, + padding=(filter_size - 1) // 2, + groups=num_groups, + weight_attr=ParamAttr(initializer=KaimingNormal()), + bias_attr=False) + + self.bn = BatchNorm( + num_filters, + param_attr=ParamAttr(regularizer=L2Decay(0.0)), + bias_attr=ParamAttr(regularizer=L2Decay(0.0))) + self.hardswish = nn.Hardswish() + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.hardswish(x) + return x + + +class DepthwiseSeparable(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + stride, + dw_size=3, + use_se=False): + super().__init__() + self.use_se = use_se + self.dw_conv = ConvBNLayer( + num_channels=num_channels, + num_filters=num_channels, + filter_size=dw_size, + stride=stride, + num_groups=num_channels) + if use_se: + self.se = SEModule(num_channels) + self.pw_conv = ConvBNLayer( + num_channels=num_channels, + filter_size=1, + 
num_filters=num_filters, + stride=1) + + def forward(self, x): + x = self.dw_conv(x) + if self.use_se: + x = self.se(x) + x = self.pw_conv(x) + return x + + +class SEModule(TheseusLayer): + def __init__(self, channel, reduction=4): + super().__init__() + self.avg_pool = AdaptiveAvgPool2D(1) + self.conv1 = Conv2D( + in_channels=channel, + out_channels=channel // reduction, + kernel_size=1, + stride=1, + padding=0) + self.relu = nn.ReLU() + self.conv2 = Conv2D( + in_channels=channel // reduction, + out_channels=channel, + kernel_size=1, + stride=1, + padding=0) + self.hardsigmoid = nn.Hardsigmoid() + + def forward(self, x): + identity = x + x = self.avg_pool(x) + x = self.conv1(x) + x = self.relu(x) + x = self.conv2(x) + x = self.hardsigmoid(x) + x = paddle.multiply(x=identity, y=x) + return x + + +class PPLCNet(TheseusLayer): + def __init__(self, + stages_pattern, + scale=1.0, + class_num=1000, + dropout_prob=0.2, + class_expand=1280, + return_patterns=None, + return_stages=None): + super().__init__() + self.scale = scale + self.class_expand = class_expand + + self.conv1 = ConvBNLayer( + num_channels=3, + filter_size=3, + num_filters=make_divisible(16 * scale), + stride=2) + + self.blocks2 = nn.Sequential(* [ + DepthwiseSeparable( + num_channels=make_divisible(in_c * scale), + num_filters=make_divisible(out_c * scale), + dw_size=k, + stride=s, + use_se=se) + for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks2"]) + ]) + + self.blocks3 = nn.Sequential(* [ + DepthwiseSeparable( + num_channels=make_divisible(in_c * scale), + num_filters=make_divisible(out_c * scale), + dw_size=k, + stride=s, + use_se=se) + for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks3"]) + ]) + + self.blocks4 = nn.Sequential(* [ + DepthwiseSeparable( + num_channels=make_divisible(in_c * scale), + num_filters=make_divisible(out_c * scale), + dw_size=k, + stride=s, + use_se=se) + for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks4"]) + ]) + + self.blocks5 = 
nn.Sequential(* [ + DepthwiseSeparable( + num_channels=make_divisible(in_c * scale), + num_filters=make_divisible(out_c * scale), + dw_size=k, + stride=s, + use_se=se) + for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks5"]) + ]) + + self.blocks6 = nn.Sequential(* [ + DepthwiseSeparable( + num_channels=make_divisible(in_c * scale), + num_filters=make_divisible(out_c * scale), + dw_size=k, + stride=s, + use_se=se) + for i, (k, in_c, out_c, s, se) in enumerate(NET_CONFIG["blocks6"]) + ]) + + self.avg_pool = AdaptiveAvgPool2D(1) + + self.last_conv = Conv2D( + in_channels=make_divisible(NET_CONFIG["blocks6"][-1][2] * scale), + out_channels=self.class_expand, + kernel_size=1, + stride=1, + padding=0, + bias_attr=False) + + self.hardswish = nn.Hardswish() + self.dropout = Dropout(p=dropout_prob, mode="downscale_in_infer") + self.flatten = nn.Flatten(start_axis=1, stop_axis=-1) + + self.fc = Linear(self.class_expand, class_num) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + x = self.conv1(x) + + x = self.blocks2(x) + x = self.blocks3(x) + x = self.blocks4(x) + x = self.blocks5(x) + x = self.blocks6(x) + + x = self.avg_pool(x) + x = self.last_conv(x) + x = self.hardswish(x) + x = self.dropout(x) + x = self.flatten(x) + x = self.fc(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def PPLCNet_x0_25(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x0_25 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. 
+ use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x0_25` model depends on args. + """ + model = PPLCNet( + scale=0.25, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_25"], use_ssld) + return model + + +def PPLCNet_x0_35(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x0_35 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x0_35` model depends on args. + """ + model = PPLCNet( + scale=0.35, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_35"], use_ssld) + return model + + +def PPLCNet_x0_5(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x0_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x0_5` model depends on args. + """ + model = PPLCNet( + scale=0.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_5"], use_ssld) + return model + + +def PPLCNet_x0_75(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x0_75 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x0_75` model depends on args. 
+ """ + model = PPLCNet( + scale=0.75, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_75"], use_ssld) + return model + + +def PPLCNet_x1_0(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x1_0 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x1_0` model depends on args. + """ + model = PPLCNet( + scale=1.0, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_0"], use_ssld) + return model + + +def PPLCNet_x1_5(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x1_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x1_5` model depends on args. + """ + model = PPLCNet( + scale=1.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_5"], use_ssld) + return model + + +def PPLCNet_x2_0(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x2_0 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x2_0` model depends on args. 
+ """ + model = PPLCNet( + scale=2.0, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_0"], use_ssld) + return model + + +def PPLCNet_x2_5(pretrained=False, use_ssld=False, **kwargs): + """ + PPLCNet_x2_5 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `PPLCNet_x2_5` model depends on args. + """ + model = PPLCNet( + scale=2.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_5"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/resnet.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/resnet.py new file mode 100644 index 0000000..74c5c5f --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/resnet.py @@ -0,0 +1,591 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import numpy as np +import paddle +from paddle import ParamAttr +import paddle.nn as nn +from paddle.nn import Conv2D, BatchNorm, Linear +from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D +from paddle.nn.initializer import Uniform +import math + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "ResNet18": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet18_pretrained.pdparams", + "ResNet18_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet18_vd_pretrained.pdparams", + "ResNet34": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet34_pretrained.pdparams", + "ResNet34_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet34_vd_pretrained.pdparams", + "ResNet50": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet50_pretrained.pdparams", + "ResNet50_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet50_vd_pretrained.pdparams", + "ResNet101": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet101_pretrained.pdparams", + "ResNet101_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet101_vd_pretrained.pdparams", + "ResNet152": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet152_pretrained.pdparams", + "ResNet152_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet152_vd_pretrained.pdparams", + "ResNet200_vd": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet200_vd_pretrained.pdparams", +} + +MODEL_STAGES_PATTERN = { + "ResNet18": ["blocks[1]", "blocks[3]", "blocks[5]", "blocks[7]"], 
+ "ResNet34": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"], + "ResNet50": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"], + "ResNet101": ["blocks[2]", "blocks[6]", "blocks[29]", "blocks[32]"], + "ResNet152": ["blocks[2]", "blocks[10]", "blocks[46]", "blocks[49]"], + "ResNet200": ["blocks[2]", "blocks[14]", "blocks[62]", "blocks[65]"] +} + +__all__ = MODEL_URLS.keys() +''' +ResNet config: dict. + key: depth of ResNet. + values: config's dict of specific model. + keys: + block_type: Two different blocks in ResNet, BasicBlock and BottleneckBlock are optional. + block_depth: The number of blocks in different stages in ResNet. + num_channels: The number of channels to enter the next stage. +''' +NET_CONFIG = { + "18": { + "block_type": "BasicBlock", + "block_depth": [2, 2, 2, 2], + "num_channels": [64, 64, 128, 256] + }, + "34": { + "block_type": "BasicBlock", + "block_depth": [3, 4, 6, 3], + "num_channels": [64, 64, 128, 256] + }, + "50": { + "block_type": "BottleneckBlock", + "block_depth": [3, 4, 6, 3], + "num_channels": [64, 256, 512, 1024] + }, + "101": { + "block_type": "BottleneckBlock", + "block_depth": [3, 4, 23, 3], + "num_channels": [64, 256, 512, 1024] + }, + "152": { + "block_type": "BottleneckBlock", + "block_depth": [3, 8, 36, 3], + "num_channels": [64, 256, 512, 1024] + }, + "200": { + "block_type": "BottleneckBlock", + "block_depth": [3, 12, 48, 3], + "num_channels": [64, 256, 512, 1024] + }, +} + + +class ConvBNLayer(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + filter_size, + stride=1, + groups=1, + is_vd_mode=False, + act=None, + lr_mult=1.0, + data_format="NCHW"): + super().__init__() + self.is_vd_mode = is_vd_mode + self.act = act + self.avg_pool = AvgPool2D( + kernel_size=2, stride=2, padding=0, ceil_mode=True) + self.conv = Conv2D( + in_channels=num_channels, + out_channels=num_filters, + kernel_size=filter_size, + stride=stride, + padding=(filter_size - 1) // 2, + groups=groups, + 
weight_attr=ParamAttr(learning_rate=lr_mult), + bias_attr=False, + data_format=data_format) + self.bn = BatchNorm( + num_filters, + param_attr=ParamAttr(learning_rate=lr_mult), + bias_attr=ParamAttr(learning_rate=lr_mult), + data_layout=data_format) + self.relu = nn.ReLU() + + def forward(self, x): + if self.is_vd_mode: + x = self.avg_pool(x) + x = self.conv(x) + x = self.bn(x) + if self.act: + x = self.relu(x) + return x + + +class BottleneckBlock(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + stride, + shortcut=True, + if_first=False, + lr_mult=1.0, + data_format="NCHW"): + super().__init__() + + self.conv0 = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters, + filter_size=1, + act="relu", + lr_mult=lr_mult, + data_format=data_format) + self.conv1 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters, + filter_size=3, + stride=stride, + act="relu", + lr_mult=lr_mult, + data_format=data_format) + self.conv2 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters * 4, + filter_size=1, + act=None, + lr_mult=lr_mult, + data_format=data_format) + + if not shortcut: + self.short = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters * 4, + filter_size=1, + stride=stride if if_first else 1, + is_vd_mode=False if if_first else True, + lr_mult=lr_mult, + data_format=data_format) + self.relu = nn.ReLU() + self.shortcut = shortcut + + def forward(self, x): + identity = x + x = self.conv0(x) + x = self.conv1(x) + x = self.conv2(x) + + if self.shortcut: + short = identity + else: + short = self.short(identity) + x = paddle.add(x=x, y=short) + x = self.relu(x) + return x + + +class BasicBlock(TheseusLayer): + def __init__(self, + num_channels, + num_filters, + stride, + shortcut=True, + if_first=False, + lr_mult=1.0, + data_format="NCHW"): + super().__init__() + + self.stride = stride + self.conv0 = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters, + filter_size=3, + 
stride=stride, + act="relu", + lr_mult=lr_mult, + data_format=data_format) + self.conv1 = ConvBNLayer( + num_channels=num_filters, + num_filters=num_filters, + filter_size=3, + act=None, + lr_mult=lr_mult, + data_format=data_format) + if not shortcut: + self.short = ConvBNLayer( + num_channels=num_channels, + num_filters=num_filters, + filter_size=1, + stride=stride if if_first else 1, + is_vd_mode=False if if_first else True, + lr_mult=lr_mult, + data_format=data_format) + self.shortcut = shortcut + self.relu = nn.ReLU() + + def forward(self, x): + identity = x + x = self.conv0(x) + x = self.conv1(x) + if self.shortcut: + short = identity + else: + short = self.short(identity) + x = paddle.add(x=x, y=short) + x = self.relu(x) + return x + + +class ResNet(TheseusLayer): + """ + ResNet + Args: + config: dict. config of ResNet. + version: str="vb". Different version of ResNet, version vd can perform better. + class_num: int=1000. The number of classes. + lr_mult_list: list. Control the learning rate of different stages. + Returns: + model: nn.Layer. Specific ResNet model depends on args. 
+ """ + + def __init__(self, + config, + stages_pattern, + version="vb", + class_num=1000, + lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0], + data_format="NCHW", + input_image_channel=3, + return_patterns=None, + return_stages=None): + super().__init__() + + self.cfg = config + self.lr_mult_list = lr_mult_list + self.is_vd_mode = version == "vd" + self.class_num = class_num + self.num_filters = [64, 128, 256, 512] + self.block_depth = self.cfg["block_depth"] + self.block_type = self.cfg["block_type"] + self.num_channels = self.cfg["num_channels"] + self.channels_mult = 1 if self.num_channels[-1] == 256 else 4 + + assert isinstance(self.lr_mult_list, ( + list, tuple + )), "lr_mult_list should be in (list, tuple) but got {}".format( + type(self.lr_mult_list)) + assert len(self.lr_mult_list + ) == 5, "lr_mult_list length should be 5 but got {}".format( + len(self.lr_mult_list)) + + self.stem_cfg = { + #num_channels, num_filters, filter_size, stride + "vb": [[input_image_channel, 64, 7, 2]], + "vd": + [[input_image_channel, 32, 3, 2], [32, 32, 3, 1], [32, 64, 3, 1]] + } + + self.stem = nn.Sequential(* [ + ConvBNLayer( + num_channels=in_c, + num_filters=out_c, + filter_size=k, + stride=s, + act="relu", + lr_mult=self.lr_mult_list[0], + data_format=data_format) + for in_c, out_c, k, s in self.stem_cfg[version] + ]) + + self.max_pool = MaxPool2D( + kernel_size=3, stride=2, padding=1, data_format=data_format) + block_list = [] + for block_idx in range(len(self.block_depth)): + shortcut = False + for i in range(self.block_depth[block_idx]): + block_list.append(globals()[self.block_type]( + num_channels=self.num_channels[block_idx] if i == 0 else + self.num_filters[block_idx] * self.channels_mult, + num_filters=self.num_filters[block_idx], + stride=2 if i == 0 and block_idx != 0 else 1, + shortcut=shortcut, + if_first=block_idx == i == 0 if version == "vd" else True, + lr_mult=self.lr_mult_list[block_idx + 1], + data_format=data_format)) + shortcut = True + self.blocks = 
nn.Sequential(*block_list) + + self.avg_pool = AdaptiveAvgPool2D(1, data_format=data_format) + self.flatten = nn.Flatten() + self.avg_pool_channels = self.num_channels[-1] * 2 + stdv = 1.0 / math.sqrt(self.avg_pool_channels * 1.0) + self.fc = Linear( + self.avg_pool_channels, + self.class_num, + weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv))) + + self.data_format = data_format + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, x): + with paddle.static.amp.fp16_guard(): + if self.data_format == "NHWC": + x = paddle.transpose(x, [0, 2, 3, 1]) + x.stop_gradient = True + x = self.stem(x) + x = self.max_pool(x) + x = self.blocks(x) + x = self.avg_pool(x) + x = self.flatten(x) + x = self.fc(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def ResNet18(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet18 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet18` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["18"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet18"], + version="vb", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet18"], use_ssld) + return model + + +def ResNet18_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet18_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. 
+ If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet18_vd` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["18"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet18"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet18_vd"], use_ssld) + return model + + +def ResNet34(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet34 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet34` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["34"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet34"], + version="vb", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet34"], use_ssld) + return model + + +def ResNet34_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet34_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet34_vd` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["34"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet34"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet34_vd"], use_ssld) + return model + + +def ResNet50(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet50 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. 
+ Returns: + model: nn.Layer. Specific `ResNet50` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["50"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet50"], + version="vb", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet50"], use_ssld) + return model + + +def ResNet50_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet50_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet50_vd` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["50"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet50"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet50_vd"], use_ssld) + return model + + +def ResNet101(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet101 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet101` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["101"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet101"], + version="vb", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet101"], use_ssld) + return model + + +def ResNet101_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet101_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet101_vd` model depends on args. 
+ """ + model = ResNet( + config=NET_CONFIG["101"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet101"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet101_vd"], use_ssld) + return model + + +def ResNet152(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet152 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet152` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["152"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet152"], + version="vb", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet152"], use_ssld) + return model + + +def ResNet152_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet152_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet152_vd` model depends on args. + """ + model = ResNet( + config=NET_CONFIG["152"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet152"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet152_vd"], use_ssld) + return model + + +def ResNet200_vd(pretrained=False, use_ssld=False, **kwargs): + """ + ResNet200_vd + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `ResNet200_vd` model depends on args. 
+ """ + model = ResNet( + config=NET_CONFIG["200"], + stages_pattern=MODEL_STAGES_PATTERN["ResNet200"], + version="vd", + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["ResNet200_vd"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/legendary_models/vgg.py b/src/PaddleClas/ppcls/arch/backbone/legendary_models/vgg.py new file mode 100644 index 0000000..74d5cfa --- /dev/null +++ b/src/PaddleClas/ppcls/arch/backbone/legendary_models/vgg.py @@ -0,0 +1,259 @@ +# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import paddle.nn as nn +from paddle.nn import Conv2D, BatchNorm, Linear, Dropout +from paddle.nn import MaxPool2D + +from ppcls.arch.backbone.base.theseus_layer import TheseusLayer +from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url + +MODEL_URLS = { + "VGG11": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/VGG11_pretrained.pdparams", + "VGG13": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/VGG13_pretrained.pdparams", + "VGG16": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/VGG16_pretrained.pdparams", + "VGG19": + "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/VGG19_pretrained.pdparams", +} + +MODEL_STAGES_PATTERN = { + "VGG": [ + "conv_block_1", "conv_block_2", "conv_block_3", "conv_block_4", + "conv_block_5" + ] +} + +__all__ = MODEL_URLS.keys() + +# VGG config +# key: VGG network depth +# value: conv num in different blocks +NET_CONFIG = { + 11: [1, 1, 2, 2, 2], + 13: [2, 2, 2, 2, 2], + 16: [2, 2, 3, 3, 3], + 19: [2, 2, 4, 4, 4] +} + + +class ConvBlock(TheseusLayer): + def __init__(self, input_channels, output_channels, groups): + super().__init__() + + self.groups = groups + self.conv1 = Conv2D( + in_channels=input_channels, + out_channels=output_channels, + kernel_size=3, + stride=1, + padding=1, + bias_attr=False) + if groups == 2 or groups == 3 or groups == 4: + self.conv2 = Conv2D( + in_channels=output_channels, + out_channels=output_channels, + kernel_size=3, + stride=1, + padding=1, + bias_attr=False) + if groups == 3 or groups == 4: + self.conv3 = Conv2D( + in_channels=output_channels, + out_channels=output_channels, + kernel_size=3, + stride=1, + padding=1, + bias_attr=False) + if groups == 4: + self.conv4 = Conv2D( + in_channels=output_channels, + out_channels=output_channels, + kernel_size=3, + stride=1, + padding=1, + 
bias_attr=False) + + self.max_pool = MaxPool2D(kernel_size=2, stride=2, padding=0) + self.relu = nn.ReLU() + + def forward(self, inputs): + x = self.conv1(inputs) + x = self.relu(x) + if self.groups == 2 or self.groups == 3 or self.groups == 4: + x = self.conv2(x) + x = self.relu(x) + if self.groups == 3 or self.groups == 4: + x = self.conv3(x) + x = self.relu(x) + if self.groups == 4: + x = self.conv4(x) + x = self.relu(x) + x = self.max_pool(x) + return x + + +class VGGNet(TheseusLayer): + """ + VGGNet + Args: + config: list. VGGNet config. + stop_grad_layers: int=0. The parameters in blocks which index larger than `stop_grad_layers`, will be set `param.trainable=False` + class_num: int=1000. The number of classes. + Returns: + model: nn.Layer. Specific VGG model depends on args. + """ + + def __init__(self, + config, + stages_pattern, + stop_grad_layers=0, + class_num=1000, + return_patterns=None, + return_stages=None): + super().__init__() + + self.stop_grad_layers = stop_grad_layers + + self.conv_block_1 = ConvBlock(3, 64, config[0]) + self.conv_block_2 = ConvBlock(64, 128, config[1]) + self.conv_block_3 = ConvBlock(128, 256, config[2]) + self.conv_block_4 = ConvBlock(256, 512, config[3]) + self.conv_block_5 = ConvBlock(512, 512, config[4]) + + self.relu = nn.ReLU() + self.flatten = nn.Flatten(start_axis=1, stop_axis=-1) + + for idx, block in enumerate([ + self.conv_block_1, self.conv_block_2, self.conv_block_3, + self.conv_block_4, self.conv_block_5 + ]): + if self.stop_grad_layers >= idx + 1: + for param in block.parameters(): + param.trainable = False + + self.drop = Dropout(p=0.5, mode="downscale_in_infer") + self.fc1 = Linear(7 * 7 * 512, 4096) + self.fc2 = Linear(4096, 4096) + self.fc3 = Linear(4096, class_num) + + super().init_res( + stages_pattern, + return_patterns=return_patterns, + return_stages=return_stages) + + def forward(self, inputs): + x = self.conv_block_1(inputs) + x = self.conv_block_2(x) + x = self.conv_block_3(x) + x = 
self.conv_block_4(x) + x = self.conv_block_5(x) + x = self.flatten(x) + x = self.fc1(x) + x = self.relu(x) + x = self.drop(x) + x = self.fc2(x) + x = self.relu(x) + x = self.drop(x) + x = self.fc3(x) + return x + + +def _load_pretrained(pretrained, model, model_url, use_ssld): + if pretrained is False: + pass + elif pretrained is True: + load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld) + elif isinstance(pretrained, str): + load_dygraph_pretrain(model, pretrained) + else: + raise RuntimeError( + "pretrained type is not available. Please use `string` or `boolean` type." + ) + + +def VGG11(pretrained=False, use_ssld=False, **kwargs): + """ + VGG11 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `VGG11` model depends on args. + """ + model = VGGNet( + config=NET_CONFIG[11], + stages_pattern=MODEL_STAGES_PATTERN["VGG"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["VGG11"], use_ssld) + return model + + +def VGG13(pretrained=False, use_ssld=False, **kwargs): + """ + VGG13 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `VGG13` model depends on args. + """ + model = VGGNet( + config=NET_CONFIG[13], + stages_pattern=MODEL_STAGES_PATTERN["VGG"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["VGG13"], use_ssld) + return model + + +def VGG16(pretrained=False, use_ssld=False, **kwargs): + """ + VGG16 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. 
Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `VGG16` model depends on args. + """ + model = VGGNet( + config=NET_CONFIG[16], + stages_pattern=MODEL_STAGES_PATTERN["VGG"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["VGG16"], use_ssld) + return model + + +def VGG19(pretrained=False, use_ssld=False, **kwargs): + """ + VGG19 + Args: + pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise. + If str, means the path of the pretrained model. + use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True. + Returns: + model: nn.Layer. Specific `VGG19` model depends on args. + """ + model = VGGNet( + config=NET_CONFIG[19], + stages_pattern=MODEL_STAGES_PATTERN["VGG"], + **kwargs) + _load_pretrained(pretrained, model, MODEL_URLS["VGG19"], use_ssld) + return model diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__init__.py b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/__init__.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..63bd229c9bf1ba8a70ef4c1e97e47c8f2fdff0d8 GIT binary patch literal 181 zcmYe~<>g`kg8G|=Ng(<$h(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o11%)hZ?+F(oA@ z)j21zShpxOCpED+Ro6(*7?*HCL2^!UOkz=TMody-a&}UFUTREkeoAUid{usaOniK1 bUS>&ryk0@&Ee;!?cxg_m9mvklK+FIDG&3%w literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/alexnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/alexnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b84c9963fd8e616f1ed69a4616c1899bdc438e0 GIT binary patch literal 4140 zcmZ`+OK%*<5uVrX&OZ5;EQzAmvg~A&a0OAar5K4VMG_q()LNFTgC|Nx!|h&j#F^Q3 
z&nzVoE5rx|_{5wH1W2C(+g)8%-CbS%RgG7z zmNYzn{M*INdnYvQAJmzAEOf5njsFV5G^PidL%&9#g}S5bDbEN^$IRMRXd0TXg*hj$ zIA&mk1*Z_&jvW@AVpwuYVc99GF$-fArxI43YFKk>ikAy!!dYiFvo`0WeChDy3vFL_-uPH!MOJ#Qu@W~Qo!U2@)8Ld@1)NI8Im6~yjm$_Wby4`@(ZhKFL%@G%K6z)fU2gVwPTnf4# zbJ^1^;q|uNp5Te_{7A06mhE;#H+1_VkcI03f70T~-u>+)>BS#hxX|+$3;3!ZdRsi= z$!geTJcw5#FXYY5N6pPP-|WWCb~n6`O?DwOF`AUKW{>q~_i>xNG5Jso&=bE4!Zk-{ znq#oSS6WABCbOOw9h2o){<-E@Uul-+^(jfDP{{7sonuTWu9=WD5_qt9VVk z@e`0lOZ0)ZZyXvg5I_T)={ts4PRxBRGMF~dTr0^9^no!j2i71rC?xqokr^koq%g>r zwLyO0VCI3LxYny&mOHRv!A^=SPXWY=2PXO@reU$Amb3&9`qFkEbAM|)iH{6vk@sZT zkNm{<0)LO=*xhcY6Y~VS1K(|Ldr<^G%L+na+$-;J0k#`MN9N;1_>9W}9g!bx$^4e+ z_It4`r!%=8f=w1TeJ@tcWAeta0V2p_4O5(=_Guz#h*6fgxZmSK%%Fz_yR<;p6$O%* zBSIF0vQQ?GoZH4hh+^9A;XoQCX~jI~$eJJZCRaX#eM~XR9Y098P+J+V0on=M70YDN z^xBD7MC%B*$93C*7ssx9r2XgfTOT~Qqr&nAc&l(P_hP=f*1YsX>|U=O#1A~t-hQy@ zwRbkV5r3cp+1=}QA9#p)gm$y{RGh&I?j}Y;FU95nI@Jo4zM%FSAKza)YMN3Lg;5=GK zrg#TGGOv6bOZ|yhB>_d6G*SQxJoW_p5S>_E484^7?JZ3&Ig`XBvk-2zj`DQ!6ripv zORfty^#f{GT=$E<7o;P~QDTkes%A&i=0Sj98L#B07!|Y}6;I?)wG`XN4b-*oYG)L` z4gS>gdPvACNWd*rkYKUg6eJX6F+-i3pCA7d&i>v3R4@nlNH&Q5X*6JZVjfxy&Of6) zGm;zu4}lIV3=F!>tfcODK9ehtG-=44hXnU&L{HJ=<>QF}X+t+|@X zEoX8yc9yNMHxCe??o2YvmRTKhyfr-sIOnE0?Ck>!8gt2{{yg;SO8@+{H1yw@=4AQ{ zq%qJj{_gZRI6s=^WEzVKYk%C@N+aoCANe~tX7MJv$JdAuTpd$ni8>JiGnpgsy4Wzp zPl!c8<6p}|>|3;sbHt+?k&3PP-(txB5>}lm>0KJaxmbXZQ%QLlhFfDm6KBcL3W)zD zGSV?3KeKRoXdzX;oFxEX1DvLlzUOPW?%AIH{*jz#-N#Yf23!G|@!#PBnO^Tni?Gbs zuv}U0wBH{ZmL=*8*QD}FhkHq1@GP$kd*ekq(7!U&cc6bI)2D%}lLJ?WhL^-!w8g7L z=#pbKUa}1!|3V~bCdsQbaZ#qfYp-UJlzk~1zMTEOC*W{koXb!TT@-X4(sHq(jJ2kE zR}{Sp^iqgeOp`>^4@HC4qabJmi)wivgCBnlqUklG25g^wX;1x5qJ2eSJ?Ovf63I^f zy*j4sC`+l-X#BsnJ=UGz7KYZwyuJAmjQwyfAy?d>T^xc;=92Gc5CXPQR>`HUDPBI~ z6&~3D1f5ZtV^Ur^EROU5Sd^Eh^va{r%402)>52!B5}PZek_Y| z((S2cqb4@68F8CP3qs-Qw|2p27P{!74zf?D3y-nTW zL+#P&-9%@Aub#dpO40jh8EBnLP#I7eO6Q-mAV8SrNPjw+ezP^mwpGBVnF zSnrDZ!%ft1+>0J6y=EgPs#v8g`LQ42;G#B{Cd370Q(3v&j}kxRw}j}5h9yhmX=P4T zSF)HES@^OLe}Kw_<5Zb4>#EO@vEE-G6wiZbx{b?b>vS>|D#2a~4(?Q1CvOr){1&hJ 
zen_;A<|DO!qyJfdW|HRyW`d|b1)nnGz(V0~!e3@f)Sx1M4+7v&z(vMo>Gmf#Z*92u z?{3^{SYnf8Xw@>mgIa1U7F4RdvIS-78}PF#i!OTQW!`A$>kVUly|E&GM@m!<%3>P8 z%_x#a6gi8dZnM*m+KJzdya13rY72){sb~?jRRw_x6w`v-CLWdEf(krmX>4xdyFyUn z6TbnGOR5NN_7gvdo3XdcmGjk6s#6vnlClCn#<%AVe;VVP#Pz7AlR5RBrNDgbuGER+ utDsuySnXe=MdOF`9ic9qtyl0juP^J1__d8$6G!?#D_1e+&(D8as{S9mef3KK literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/cspnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/cspnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d996fd8d117d9ad7b38667c53857ee018ce7709 GIT binary patch literal 8361 zcma)BTaz2db?&)fFf+I;7To1U7iuNT5+Q|4i?$U@VPsLHR*plh8Li|v2$kV1dVs|) zb^!JamgHtoQM`1lD%YwK+m)M@s%6SAe9x~)Repfy$yIqs_?AkQOR2t4 zfSNNs-96Ji-RFGgJ7>a5#TWSf*)LvN|HpGe{3~6|{>r#`9Y(-el-wwnd6F?(X$X*yiCdc|$G>27;X57)LVbd9Dj?Wh!$q$7*cm7etXaV<-- zEGx1q=VVRJ%ldv%9@{t3yDaUF(X6~B>9yO24Cs6amxTcPpC*^6>oF3O4lV{{patZe@pzUe-4BDOeWlQ`Io@#a=&@fxO*O;b0>es3=uoH_lH#vN~w1 z{&p}_y}W#B?ds)FZLLP>*T1^k+(^?w^6I&BgHXy|bhf)4cA_{+&u;f+)Jx9B;dZpV zzPY^Kiq`wda;v|6Zam_-+2>4$kL2<|4rrFi{Uyx$zC{8%DRR-U^J0IP2Ca=GFKtCC zj(S1T-Hmc5NmW-yxjpFjdyT^V*Xf=QNMMUaSC}MhAsPkcQZ25^HlK& z(I|A#LciDlO`recn`!~2L(wRVTYmX)3-`gpx&I>FbX2_CC@RwDyda}Nx}ivWb1%9z z2vnGM`+0f2pQgQNd{tWS^;=t$3)#OJ2OZVFnQmY^?Iex1^Foqh16p{>kza9qm3#w* zNHh&8nx-`HXBkinvN_;=Z&Jbyt;_dd%R$iUg-H?w_r#~)d-K(6SJ?tDq3*1TdQq4} zXU{Kx^*2=y2CZIlEmWyq69{JGEssu=+dSMburW# z)S2Ttb4}bcRvVUj0e7hvNznbMIEz#6@B$~faZ7!T+E{#@T8izyx*00@`?yG0T@8Qy z->SFf+6B)P<&}o`irK6A3|)YagzwZP=ewALn+~^wz|N(N9!x z5LECw{u2r*8``LtnwgPWsoggB%;bs8=olj-Eo8=?k(~Lq_&$tjq3(%rYXS26YRBujb+CFy774t7|U_N_Mdvr%5vyU9>Y7?0hgw^F2V z>Ln8NqKe*}*8uHPi)KT~jb-&XTJKqTk!+P*gQp@nY-tPE+t(=T4fEHc8^b70yJ0W) zyGb_&SjMd=FQmgk5ALJaP14-zMKKTt~h zx0UAhX1^OZD!DsGqJ%MhX0H4ihi^qw&TZ*p_Y79;o+lcZ(h| z`7(qrmeBJ06}7J0cv!_Te!JLJmC#;SsJhxG00JuLA&kXAWTTsAySb- zNB7JFliUVrGIY>h5PQ(rkq=FVn~=sn^pvzX^V`r$X?2aP^mF4Qaur3?LvK-U!?6@( 
z;bUvs=8oORJ#$9GSfN-0>9r>sHU;2@r=CN-B7eq~om<^F%?ofHu~ctUEtzm$pxt^& zpUy+!&Qc{%iBwW+13&U4-9W&8ID@7S#}jK@2hUXfLBMXlS={bQy1iL!Z2`eLiDKB8 zLK{zKh-W2FKnTBT)Sv<LaWYK-NzqnUT66q#Eso==1o~vg!1qyYY!~j>F6PJA&iJ|xJDEFhBzBC?gt-=M<`{6;-m;QS zSP0JQqqK`NkEE=FBI?}cn0y^GR@#ZCHo*}QZ? z=oV7{pmdjdm(xmCU=OoATFk^|`JkH4Ws7@8R>c$QpnJv4YZtmAo1?MVPt->XH~v|V zKu`PJC^fXrvt1t@eV%RFTRpTV^@mtc4LEGk_6!bB&05zH+i%phfd_OK>(lqB^hYGf zymPCA$cSxRBQ(+O+-0x^PiK`>Z&F8x%{BoxgfaG;2uE%qG|A_=Gs3Gl>5^Xx*7LcC zsv5@`G1N60mb5`TLY^-vh&n}r(c=V_m>X8xT?IceA++i-v^2{tQn{eljiZq0EJqA- z0M@FD?HEA?S87mcx*SI^&QRPzF0`UMjDxLai6^VO=Pn|}9Y(c3pou&Jl75Q%L_vsJ zmFZ#)z+|H2e&*Oz!hZr!gf$d53{Q=l%X8r&^ z<`nQz*C13uJ=K_EWQ}Y*3tHQ9*xCj@=8ogeZUULI0Ha2uj=%x4~DIiI01VdZ3d z(F{hej9zZOtIs{dbs>B0WK-Zl^=e zlz;CY@d~6v6brD~y@ofO@d_5u9vBY^e=dz3kvu~g7DTKt5NU#Mm%+o$wgH|CUw=yc z9KM23{TgyzfitRPl@3R&S%nOARK>Y~^PKKYCa7*@bFv82nnOOukS_Sf9LDnYEyMuc zfzLjLJV=e%b{qMe`B5EyrF2k9tJysJl`7Amu17}>rqng2zNIcpJn~#tm*rD}M#`{W zn@GjN8_mf|Hcwe0S>3lf#&3h$1jQR4qeTRB7{4ax;QvH0k8*unK9+i$3+Q2`i+h3& zS=$E(3XZb}o2Lw39SeG&V2_ty({;*-s5eMlB2ghh>|9Z33mpD{%70FRean;~?-p4} zlVx?08eItV{t}fbjBT7abbv^(aK04gl6xFxQ+`K>@bA$LG>5!APWK|i3k=_*`ZHAT zU7;N870M9da|3KUQQxM+=_npH|1aY|diAiL%d@Wf^nlr1p( z2W68in%KKdC;+Ao+XjoTaWAe%+vMzk%~Lwr8M}M;f9(f8A^|)Nm_ZZ($nihBbJ}bl z?cK>T>9~X=c^zWbyKfqBVrAr(!Q9|jj2ktyqRg2LV(62SJ;`oNSU`cbLH!61)XMk~ zmHrZ9Hp#)-Ipf}^oceuQpelM#IJHhW?^egJGD&ch9_G@dYwCANaDIb>((lqGQ@H;> z({Ts=zC;5P$)UBM{Kwf2!~7K(HuYL**^rYTl?ym{2`{6Nfmz^djUT^^l8MqYDSLmU z53GI55bKiG2!$HChwQh>E$e!_ighcS)h1HdJ;Wpi2f^22dle^!Vd}55` zQhIw3Ep?NnxSuYCJ0ZTvtoNejrK|keVhOh_T_;Zzcdjq>)zbBK=zSE%*ZIC>d`_b& zYv)NERgnVZ<#&ci?QTbJD%Dr`R_Gs&mlyccM(*h^9Qcf>eoFVTS7X6mtrqaN&%^sV zw3-B0g0PJ@RdX=0yI45FsJ^B)&lu4PVsE6rY7NlW=!rVaWvW5k78)~S zoGtttsyX_Kik_d+Td`;5WMLT=3O-V#-ZcNibh{$gx4H^IWhkb`hOyExS5_J))c{wy zr^C uOy@J(AqYaE;h|{6{Iw03va60=e-KsN}_eKEX$T@JBdT5f-O;Y(zMOR7d34i#&wdWLEXY27m{FsAiV&U zh(k|1v1c;V8Fl&->`q_&ov%6>4 znV2Xlcz*romF72#it-l{4n78i_wa^)1z`$PeZ``$<|}R0Qq>`*wRKCEywTP*MOE4v 
zD=QG)*V;KN*Unq{cEKvNi&jzK2Jn(qYL~5Yd%~I!SjL}hPgzs4rHWO7rL2FfJ#9_5 zXRI0EIhOZSYu24}XP+E@sytGy`5!8*z>1F*R&@3ICm!k60+13b11U@7B#;R<31m_t zi)@Bf*s;f|b&AchX_f9T;FZshH_@9%8h?sR;r@S*eNh)qzmp8GWLHqcB> zi|71~!)&&@$(`<&-Q{k?9WRKNj)d(E-f7!C?#G224&QFL(Up4tv#luVhHqcG)O8s1 z-DR)sY`TFPEw?+&^~2@BX}f0gzS(TK%}!{xI_*o++@++E#IVcEF6+{N4_oAmgAc_6 zJ>hF0u41W7u{4(bTG>#U#`MPpOJ@elJXS2@YsFBk3_O~POXR$3ja$yH%NwnnXkke5 z_!RK!c*6iBQX+MrJkp+MPZ33tK2QhRKpz-`Ok@nQOr1yU?Wqh__HqNo&H&5ID-Ux~ zc7T3BC&zL_9l_^^JkY|t0$PMz+RLJ5jw$F{xzV41JAxha3)kD+io$1VY>>CssoP#) zx3-)hfD7X?MmUv9+b#!ehhE=}vth(N=Egblz87r9rD0>XgV>1+P0tAh+t=giK=M(9TUjM;Xbpl_Hv4zFe7a?oq1=5(hO#YHhDq9r~LSH+cfX9wnN zs%TTE<5`KI4bKT$u0U(~I1~1|F6Wa}D{tFg;6=90$?04pEMt=oIE`+s+syz~jL`Kr z;z=*)9{k@^@WLUWwBh-Y%Y`HOEHyYsq(Uv_n!|3vrv=+?`A!(x_A}+%&u_kc_qGVs z>%f-b57!CZZ`0)rT55 zM5y32#FN^nX}k&zeir1J&d=c!XN4ETSpAaMs7#QHRFcCI@P)(KI}pMwh@uwp>(3nQ zlhi@u&$2XpG}N{o7i}9p==sE#ZTmsb@rNbC3*4mkC#2t7uJ2>SQ|lK{%Bc$?D5#gh zd_JdqJGm&Z7O+Fl|2OxG*PPI6UGqDw?M7?V1hq>%O@DH1_+yZxran{g!ver`Ok6`u zTmy@OhRL3p#pXceD5WVsEX{~%op0=`Xps}?YvyG>1!kXybf3%tTB&9E5?H=Qgu-Sx zJF;RXti;3bYRwQM$j_6)D?})=x$s^xilX{W;2}*gMWv{#JkNv9KJU+@|NrH|MZUOB z4bVUw6aUo3@8-J>wmD9o0j8>_qM;#=VOcS+0+H-K2j^x|(2oDR7dZHkHy@g5Ae z=>uI%V~t-rTs@5#a@h@PLkyuhnHNrM0Xg(gDLwz&UXha<(*EBtQZLNHBw%i1Fb@aJ zCbE-1VDGT?$e_IluR(he%ZjkU{sAlpG!L{O(QJhM0(cR!j=Fhh<&ZrLwaQYf0Id@6 z38__-@}!hYs8wQ9vQ|abDnn}u_%W$9A?0Z)PfB@4%2QIFm2ySOb5cGg<>OMGmh!xm zXQX^W%CqRz6kCwJIti5ay*X$t0AG|^$EAEq%JWh_E#(tZUXt>Hl+Q@{q?D^tUX=1# zDW8(^IVqn;uNK&O*{fH8Qm>YvbsqSuQtOPAUz2iG$`_=3R?4qS`CN3K8INf6UtzWA z)hDm9i%-=><)Ic`VCM&F^g4SZstvS~GSD7r>^plJkY&_WnYm}6)WPuaDUy^^w5Q}#_y z!SovSZ}kX+_0(WJHCRs#*3HCV-AoMD&BS2cObpgd7=(*Q2J1%{TuTkEr3TkhgKK7e z6HdkR_je`p`}gAFM$49&9ac<{l_0D0BG2)Wxgj}2`7i-P@o`iZnN{tL;ok5A0%&ir zkm6zJLa|4r%dJ@V;wigHTb$jbJqIuXiR5BW6RCeyYEe#RG{oXBwh5f? 
z4d+WQteriuSbTsm;6EZl*)IPHky}J4AL9)o9}&4ttY$C2z_a`_*)c)*FeP41W{b1 zcSOZ;MacgNL5*oT)=Z7!Gf6W!oqS%NYSIc!4j84HS2-c;FB~LjqKX9i2ABad!5*e; za21>e?p%b^8r>sc;BZeBEYTf9giL_HC2kmS-vE?yKjaW!;Z6g?nQ5$lf`eQ(RTpXqKzJXFW4Wg)Y_|0P_DXL{U8b9YD&_h`^r8;yFrSKG)d!%eA9`dF?s=reo z>g2hB9$_LZz^Ay2F;vL#m)_tLsKU2FYPuLE(Rf_^@S__yZ`q&Tx%DxQDqYf{UdP#O z9DFxJ{(z9NQwp{8C%hpoyI4EL%9qt@>UvFEU#~5R_#Iwd6ow}rGYDcW2&~yeG&g!d zEAl#l<0B17d3Akw@gZ_LvB1;c6c7J?+?%V n@6btB924`nzbIo`ols}-&1)4M1Ntu`Q`V=?O;1fb#fg6dfZlN8 literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/densenet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/densenet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7d8af7ddfef60101614af0119b271994f055c704 GIT binary patch literal 8535 zcmcIp-E$k)b>DC70w73&q^K{;v@JV^9Z`{DE4G`)ANoipl@Zle+R0YwYJs~TK>-Ws z?t->xVA_UC-D!vEQ!{PHZK<>`>_h*IPCC>60lkmyOYzpHPHIo9{?1u|0A#98W@LbS zc<;G)@1DK)eEiNqI5(Hq@cF%ey->UTRZaVM>P&webl$?12wl^JCiIrp)_q;4wt=?c z8{9V0Hhq)ZR?G5jj9V?ockthCIqjUE8;!Yb&-b|PwA^;y&$kPHp0j zJkWM^|Kztdu^<-jYhp3-wodJu{%LT^VhNn35$7pzmcS@Dv1TCAb2`W)#Z-%L zi?EwEx1+bWH*Rz~ExO^|@WXNQkc-uG-;bLOEK@PmN~;r!K-}AqVRtj=$|#j#GgfDg zWCsn|X$L*o+WEm|ns$>n&Y$mwLbRfD&33pE#Zh{$-4RhMITwfRXuYEE_`FE0cSV<8Ie8*WU-}Z4e*Mdm`r_BVG^sCMdgUmoE4C_3 z9miDX(uL{H>yr+Ioa$V9rA{h7{jo=#*-k_JesrFvi6HTI1M zny#g0U+?EqtM3YZ6|QYh=eD^D2Lhh8s)5=VarT_P7TBqS`9|ODTYbCl@Z4N>pKyh@ z=VDGyXe4D-c|k1>l6u&R4s>OalB+ziI*mpWrOKstI|^e}q}FyQn_*m!4s2zH^;G3q z!-Ei3t-M+@OtR)eyR+SlH~7YY*IXWP#tURzH~1iGZfvH>wF6fbnsHFy4C5Hqri$(9o}!!$+39r?Rmc`&5#%%2#fq!!q}Pq4TtN@-5;Wsx z8U!+juB2mCg`h?rD2U-ZB*hbD2X!3Ufv2n_YBf|b?zN``pvw9MIJ(6?DcI?BZVB=|a8v8udvYDm`x-7*RLv_HhECt2p4T1S`P6l@{+JeZ z;}<)}X4G_jyhU{b>x@67mULall^h4@Yx`PXf1nQxw4iF-GTz6_Rx5@)j~SBWI56c! z{8M%_?)Flp-;=M?5R*4(AP1NIK`6z$=p?lJ8brGBc??t7in7)!y43@R zPL{MR2%x(G_NLdOb}QcKry40K?V- z6NatB$c<$nA7Kc+Zwt5tVU3)Cm5uccI4PY+ox@S5Z^>Gk>s#cw_Hc@U%MQV-PPSN? 
z!G{+P9Qk#~AYUQ!Dv?V>NZVPxha@4*yr{~e^Fhp>1ZIZ6MkEo9^{%o=TPvPCM>kz1 zLW-(#A`A4HaTefR!a5@cxZ_FZq9o_2ilfOqq2jP!v1F;E9#3}l$XC*VeM5|T8E z1(bZKhbqU9zKOx)MG#Fd=?+_o!6= z+mm0XF^gQ-lwz|^lkVTal@viTLvvtkWqqeS^>$-h)2s3=Oq*I{VjXmb@8e47ja|L8 z%BT87ZQ`9)r+%keCr3U0kRHtAVw>1TV>U=4)WIe;u<-%xVvtMizOk!c(SG>d0RrBZ zwU5xapA-7N+jkHyjaBV|k$QdD0PF-|l58i##xs;d$Tm!Qmz9a|2V)|~ni)WzViHeGvZu2gha&g|K;Ur%Q#MaL$|!nR zFgXjN<;W-;!$RP2bPFaj4j6`B{>=JcjwoLPeK9pnujxk>ziJp)5)xrWEl|n}34QgxsVCF*$H#nw*LC%V;v4 zOG>M-!_8@*mQG0GJt1A5zlmc_nv?=vr?alsXoTtY7kXqWWXK2;WrN5!iA;j~q1OB@Eyck$)0x#r1^HzZhqmy~XdymCy)-9@ zQ^z&)u}8gGSnx%uL*X(Gp>7b70)-#BlOXCaI{0dh?~Pfdb_@)Qv=cKH;MGawZ=)45Gzu+GueGx%nV zm|<~-j2CF&8$@Qq_1}Z_2&5f*B!aN6m-WUlcV~HO`<{?i5t_|#?c=oSA4$k5ty-EE zVysa|1K7t0TH3S&TDSx<14B7;g;WB*KVn8Fb`v|Vmz zd~d3qpK2Glt$$(<9K>$@gm%juEBBYPQ ze1m*%0KL=E{V|iOg_G($8}xae3%FXbrglK-R-fRoN5)FCaWJ zj(dgpuM(*cxkThJpU?cU0Bf_=I zvNY6QfKtL1rA{n`@-Ut~0vSDu?^;$-Ih}d)CNbEX|Bzbbi&UN-dx7DsEHGmPBHH1H zs#qgHPjBC7Zm5+E|7JWwcR{O}q-q(J+7VphWiqZ80dz!^-pSvghutDVAWi-*J;iKA zF$dqNs!o2Ys6%x%1_a1M!*mm6ox3uXE^+7heyTOkGv-uIros~tue_{s0}0f^p{9?} zwm-+Of#OS$bfdbzNZ^PoOoz~j72rpBjK4rAoF+_4T+|!SRC-gA-ax!mY#$W`{{^GT zpMwyZasgc|-FgTtZWV#k`TLoZdjx0c#(%rs0E05|3RY}Y} z6Y)vgFZ691=~J=R%HZ?xAwDpMe3FJT4Jh|kUXpgY+^iJk??Y($2Sk2Mgw9p&68S?S z_lbN=q(S5+2nt*+)JCD8e~jL^Y(XhHI}G`!)M9t>Eow3BVR8soPr=6~$1unVyHQ3q zR@N5`N_G}-?JOMb`50qZa*NiYOh2oXwW#$Lk;(duviKon$^VRN3}ma4IM+Uxs2rZ$ z6%6+g=X;vW$CuDD(0VpSen%x|+FxCZ&rta~HFgd88mBQRWrOP?UR#Uh_W-NRpP=`Of&a8pM+HUN@qU5qHwHa64?a!dfd@U%SE8(ly+&cALt;@y6}7j$FH4 zL%IUjcbo59M?y}80+q+lQ85k^1#9*dl3W;?nnWru!?%1<7vY%Z)SFRQQO zXCe7h+%mOsv}bA|;NOxa_q|M~RK}3T_Tlo`6v;jw%1^+Zsis|L*s$T)iHA z|Gn$)A6W7)Xb!!Da_*qgx{=7gB+f(=SP5vQ*XSL{S!&QQ6pbG`20!76%N~;ei$q9v z`By~#hR7pA(Udoj5Q_LuT&NBSKqvSGLeZ3KM+imyCoWW%*AnCD~Pm1318Y)Fqu}K*FUBf93Fw^=Z&USU^%#QY-v}%X%+uwvzQE+>Y4V z*>|v8k$+9IEJiHWo#nah3DO4F?h%~X%%y!ubSn2qXc~d7w zz%BMS?l-gFG`>Y%EukEZ<2_J-%aQ-MzN{Aw*I2NO@#nvsxuRJf-Bl|yp5T<+@IviuR-8{7HVBw9C4>$*m36ppPVh^qv#o3vY{6nAGr?=q0iGN=}Ev2QF2YIy|4lrY2zydV9k^RZ~MZJqwCy@ 
zL_6joD(0el4&>LEajdb$>Uwg zZkxxU+u^}{YdTF6apBCF#G^D|qkia3v52LkVN6*dMk6m|_2x{y*<#ID)LU_QW_6D< zmzjUvP5o%jO}w-<{o*1@W_gPJjB#kM95FVyg@<*B<2Vfq^vfdjP4npv{oBmXS=)jLggmnUxhY zJ1b=lCHI`H{2THiAzG%*=u`)2%$U>|!Fi!=q!u;r0@W>M+M-PhKrJpo_sO{x|7Na| z;hx;YOWMmRQ(?DntEH7AQ;7X;}iYJIy>QWGM_1mwTxn zOZ&#`O}FWLLOR%4+yG4~i!2d-5Jy}GtF0h_K2s3WUc%%MWo@q$q^=+NsqY2w5nPrM z=tXKv2_GTcG@fMz!~|LShfBGVJdq(_mouKE%4ILBy_CDa>?AyQtzdBIDP09tDPLGLkt zYC09QiZlfa+WZJEbQA%umVd!<#7-cnB6t$PNd%_=NNe^zk57qZB1^8DJHcJnzKpJ1 zn0Qll*O$RDqnrf{I?d?W`b)c!lLV|c;qlh=MAK`{Hsgp*sDA9u$MM8QuHRG#1s6K~ z+DW0F%<<>oTQCpBB>-B*Ci>3b=Vn>|msK&zAkiP#Mz3y?%0pZ4HSGr$F^T!W)M1zo zamf6s-P5za=FApG$J&E_LIb}HL;M;*sx6X?ED=f$YqueH7ImE6CJ@oaqM1RKQN3*d z#Q;iS3&l*e8B1j;X&cnQ+ygn3ERJZn!lJ#tR_U_#Zsm3X@|i5a>NjTKt+V@%^+^g3Ta|m)hzxkBY=(ZLpVPSpjN~pga=zFLXPV~ zW_1F@E3SK^;|2K}l}1Vz{u&5DmB>@N#o#FFZt~N&ab0v49{?~4LtyE!b&c%!ecoHM z2Nd5}3ro`4gZO&JT!u*oX6WgX=r&aH!=LcRhVD;*osyJ0a zq&f+8(vPMlt1+)mHscuHM3YLcUMq0)l60T|MI!a07Lx|B^CD6!Z*}0Z60$3t#~dy+ z&c?Q~pz0X(9=u-Q4TQe|BAd*ahrO$=EgNz%FbVXsMzEgQq#uH#qC;hS$lnI)mQY4( z;)&#S7~&j&RBMAh@qS3nwcFplVy0uCzipr;Gb7b$NmT~tt_dC20Q;54RZOjX3`?j; zTEPV27*5weyZiT#7Qgu8Z!azv^CqD>RJZ?F|Iz2c!uX4`~gO;l(7$`P-5RoeZ5iGi&|hg;gblu z>4Pkvz}UZ&P-2HZ7@Du+GUowosiPZlXz!Z($YBK{4W5AM8Vx}d=pYJHX9oMb t`9ba?Rxp7lPn%T8A-&(!w?6-{hAKw?C8x(3RKFoOrag2HI780Re*qG=bm0I1 literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dla.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dla.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb12baed2fd231777869c05b86838fc669f2c962 GIT binary patch literal 10922 zcmcIqS#TW3d7gV`XLmsm1P_oD1ySe9B1nQ1by%@33YKLGRwz<&Hgr~l?E$#JUhEn0 zKrLdI!Pu1tsFYMvm4oE~j#Cbz#1BbclgeXWQgk()6jISZkHUzF>0B*TXO5!QdZ^7npe-2aw=`r^7TTgP#-D{A#V#uWP~fS z!V@`>7X>jS_TBSJ`+|LI!|PgUe~=Z!V*jdM8bNAAr4D>q69>fTT}_Mz=KSc2Svm;L zL2(G2Ln-GFIETd%aE_#$!{CgGqu?A(IY+=bCXR!1Jmrju^Wubf@UC7uDxMbO;vuAt 
zi3{SSco?bU;u-OXcoeAz#Ixct@i!0u=a70t=x=Mq7dN@?(?uh8rkk?v*JAh8W+ROJMpV>e_bVc3 zMAhgH$`96>z9@@3bJA~JFSle6Nx#~NPwdDp&&p=KyeMl+Z(on1R`~qs(=A_!T5zgb z_veB}5S^+wMNkV*HT-%oF*83gQwe68;Y6ibKb`h*`r@UR&zZ|~CX-qzs4nH>jvWlw{>aOU=ot?q3J*y_$pySLGvJu|uEX!j-u zX!Y}01U9?#aQCDGmmz-r;~% zks5hYbm6%l(=hOY9k6fuwZ&jA$^X}jl_b%lR+**g(1wl*^&t> z3u_CzV|`b%w2}?mlSQXB{}n&1RyJYhwTj(qQFD;a;-)JI8z7Mu>1}PrSTojP>?3pD zYU^!7=xwuY2?MvenrUlgJ2Katwy}=7W5`Nie&7HYR!+T&4wJstHnZW zhl{O1$^*#7Zn<1-RHJe^HX4oCft9^Ec`>#tc%F@OulP~rIxR4G=1k*kY+Vj6U5zan z)E1LUljKjzBW&4;tuUy~#%{IIs`-r|jGg4Eisdo(XIFeV7jEcrwp^~%{4gw+H?)tg zzV`gJx0EG*6}(euKJdff)a1mu-Po;Gr50ZEW##&{8NaeH(`*FSlI*29 z9GQ~A=a2}woUT4}SaB-I3|w7JehTcaChO4OtkPyn*wAJRcnP=(+H5OrcAzl1xWH*O zqo@Y|URl^Q(1M!L=1WLbG9aDHMS=*B+Zz4|WKg-48k&(6*;M7WBlKh1s0Nj`N3|7w zIn#!s&u5Nm%bCcX&%z93Xk~=8YC(Oy$X&~|Gs*zuRXOSlTbT)%iZ-m!s*8GszIsDx zJ)^IkeX2Y$B*RGWSB7Ln8HdcYd;z0Yvzx-^9cJbz$w82!lQ5{Na;#gVw9=WA&Baz& zJRpy=fI5@xBP+*dwGqWu-H)!v_H3=$lyNq=-SQhDZORGOeUN0F*Xt$Y)983jlnO_6ai@g~bgz~Mbh_08sdR3Iu>PnQllCB0F zpv8~^sAYz92l_sGAF^&(x_jR>@>{iCH-6@3$xTX^4h@LQ|3~>HdfKD%Yik{)*VZ5t zskv}I%QVQ-=(d>I)i}skP%dem-vAZ}<2)3pn@jYab>$9f^2gwZ)bFj1(fi2F&@C#5z}4z1L4|mzb8)!{ zhrWzEd5v^cCZu`Ka#J5sivP85i>C&XIa&)T(8!I3>tm%rJM|H!4GjO(bAymBbc3|V~)ns zzEmP6QpuK2vxAE7A2n-MwRYhL`g?@2S#Swj z%>gcYYg-GZ{09Ko9Qw*99uB&Qn8qrVrO>w4Asc8R)A)M3um&&J&b9NxU4^^=LhJ1H z2rO#ar;Rv>Ue$a3_SHcDvzEwWj@DtVua1|D{C!*bI;yr{Lr*}hsc@^jh*g)bflNVzm19+YlxmD9#IS^#l=}gBL2KB&M2iz=ucK}yahkIC43 zJGimPXoz1+yzoPO-C1l2KMItD0OI`BXAz6K zd=rBLWV(Wfyh_4dh#kMx3K~NG8adoS`E`=t0D&idoN1cI#9*?C*qH$oZq{St_F3GM zeU`3kD*p%dQpYf?P6!TYUY!;Wfez`)XsXrhzyUY0TBGbsk)Hihd7#nh>D^5$mEmvL zE8lgU)bl6)U)O#z^onGS-Ayf>LI(lRNeeT8?@!{Wv#v6|G`Kt=3^S(669VI^KnD(E zt9T+Rfb+i#FSI{45tmSqxqmME(MR<^V0f=oa!7WreMofA3^#GO*kX}s=S*dDEM;?S z1q`^)uzzhLemBalc~N$r$9zNRV_JgtIr>!xW%-1=t|MJo*VY*f7=l#oOZX{;za2}; zh9O72GII=aD?oWNQYVr9(Fn$1+I2;$ z<%IG6mxbSosyBm|Z_XwDJ zdl(u`z6gSl+YK4T&aI$2cOBpWaj>u)J4=CV;%Utw3Rv9N^YC(!?;t>@4hdmzz2tAQ zpwIy<2~N1pUT%`yBB7Iy^?Ix~;!Hz*mMQP#^IhQ5@Ly0@%kli({ow>oPu}(4A6YnN 
zIRES9hKT@BI(pwV)A-ycj-Axx+r~CezXxv4&^^QhK9l~$8TJeu5a8`b5HT3ij{=+U zi9=By&Y_DUDm(ynF3Nl!w*nC*B<8ebCjyjM(P0=~L4e3v(<5UZ$03?BhKB&IpKMzV z7+wN$-1cfFG5{k0h4Hl4wK*6cKm!1R8+p9zw4L`D2X>|t2Zg0h<2~{alEqAHOTRIP zfN*d-A~NV6CX>j_!>mtL=nuchA{ZqV^9_55FU};o9maZfD@<5oGu-BJBzU4?0Hp)m zh7y4Gp_Li>nd1R`jjAQvqFxn>aOYwgt~FV@H;{lmP4E`ah`>VWNfPJEt!GMNu9Vj~ zOA>JsqP)T;dbV{mKDIG zDT1`8m3@U-)myQvK91t^Kyub0+A4bT_wYy*e3>aKeXLK#85&RqlYgIFWd#W25lqI{ z1jv^BUGfLlNyT{HS_ZkHmI0f>c|F*S|mJOY<`ObM+Fd=j}0?9|IoiFUnYr!0DmYI z>Z5`haqPYMwTrJ^Dqp>P>2197Cn$`a1$-Ns3*|l599$CB$Ol}7MAen=`qVOwPaJDl zSEawiHIRRb5nv$(Cz{0c*EqO!l0PG%ufgXZH94gz+z$Bzl0PTeG7VP$yIrTj(rwcq zQ*9+hqkk)Tu(}6Rv}HS;zU{<^2u?-*kVNfRfYgS)b&IHMDySkE)RKQeveWQc`u7|@ zB|+;U2%Pqfj~xd)!5Z79KrQ*DN+Th)63&55S|e9&&G(V2GLVpLlE;EwC&ki#+I3G@ zx@|&x+>^iLcs?NM?up!BHb=7SKw0|FpSk+1@wp6kP=J!b5@PK>SeE|VXAYJ%whflX z;;XpBr$7d!{T=|u7FoCZWZl7c(XFOS$q7Y1Dfj@XE6`RIpy?T?-(iIU$*xP!(jRj+ zi91c9^Ct*{{oJu3_AK8v7wUqV%NZmF=fa@EJ%FPv^TDxs#Y|1HJ1zeT?`@HjE12Y# z;rEnRO=O5K?K%;b{x>H=l-j4kDYkbqm<;QDo|B>1kld9k@<&WEbhG=2S^CK@7%}T? 
z8}VL5sn{<{%J(Tzn&n89XH=pj|C)Jvq}_+l(k%SqZj!_DqCWKt{vK6lOi5{qv&om3 ziAM6xw=~+#Ps}bhR5$?te&F@e@(=J@>0qxK|Iq|-YW%;7ltlc}WV?^4f906qzj91; z{*?oXFqnw02Vt-nmet`niHjdhDlbOWS~wBK_lj z!@ho-1@pz{tE`hQ+QXF|^WNhT>$TzhkUct-ams5An J^Sr#5|37A|NTUD% literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dpn.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/dpn.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73eb68db76c5ff0f76c8e97abff8b28784a2c149 GIT binary patch literal 9210 zcmcIpU2GiJb)LVSot^#V@?VxEO0pGiQcY79jE&Yjui zlA?^Fa(6N3o;mlPd+*#i=X~eh@rp%T!S7rDJh%AwmlWmS>1Oz6;^r+};k>FSOkrwM zX{nB?60d>R98K~%c-_$@Z!`@jgLuPhsuuX9D7G$6*lrfVI#i1`ofOx9EYUJMj;tZB`-oU#>OETPbDWHnP8KU zOs0~P>}591jy+JF8FrSvz>b4E#lFB^WGBF#X0Nc5YzEvJR%55wX>c#GFS0Z2C2%h@ zjkvRGrI5jNTZ_)>>Ag2-K359$#l3lQaQdfaPW zi6UOpg?Y6dY+RTZxwpNjzOvBfEn(hh1inX=BUhN$i5eUJm5t?_?RJyez2|NA*?|-fo>w$2dQKbK%nE!AL%v?XV6#WB8n% zzINgNqtl~jdi%K^f9}<<968nJ?DgW@QN7kOA|sbv7O!1Mip#=;5EDtti*t3d(BWSu zy$R2P@fAm9iledIca$ZSWtjOO=jbfUtOtr?d`B@9Cj&dTMV>75?S&iOmd|?#9||WT z4DL7A7wWmRPb!F~IHcnWmp~#VQe$OD+tVH@suJn38f&p08*wH!BO|t$I*ItRt1_J# zyH>2YnaD(otgOoyIpv zZv<|A#R~$MuPC(Fqy4-5n$ID2!^XBR%rN2&=8G&@ej`{G<}z=ucS4a*I(0qxs>m%i zyf9_m#lQ{gUegygvF)X$(2p<)@Gaj9M1i;sk2k!a?mx;1-K$5!mOdeS;HR-VqS9(_ zU^uDb(xk;$D(S0AY0VOuaJ}PmK1N!W>o$T$XhuHALne$|3Vwom(6J7D1{{Bd$TSF+HRrnZrWb~;`$+lu z2iIP|b5q8NtB{?8E%;vOpSv)7@mb z7C(of=5R?1RdOXwQ-}WrReNmZ5=lX|RZ}&8zCAW%pR@h7scYz^|08>(>ng5r3M5wc zlvsVJb~SLYNy%xq@RSQRjnAS5KM(Rq=QJ6SX#}11NT^%rLVdu6u9YYbG^vVfdR+ z5XJO0)cs}?Hn2Y<;J(6XTGC8j1rwG)tPET)`9CflmvwcB5B&abfuZ>J!j-G^q%t%N zJ)L~8XGex6#y%Js5Em3zho`IX{mgz`ki4Gyz9D^|qJk-X-;A;c;tA;?KFD}tAwEz% zvC{a!&tObWub#g|5dan*aUQt*hCmLj7^p( z&%;G-deO>t1lTs;5++4GuXzxc%IKG{NvHl_1$rH_K7*_en54MCB+v{~yE$BToMYOQ z!t`TGHy_&rz7S`bv8M6cOzRdI>6yXnkeQIJFnu693t1MjShk~{&7c}(S#AfJcVz9M z_lLyUu^Q!fG`<%VVs%Fiv{>zyVrA9j{}dJDQfx5HZcKq~BQuVRadAn5tru`#l%7zM ztu$F{u@dCvU7A%Ha>CnOY(pa-7eEtj6Z#dj8`;gGT?OqVDyJh@tUBmpM0(KZLXQ?t 
z`go5*t1vpey?R?1tbIS=*CDySJrT5{86?3?zvTy!&t`)5{Zpq-!3<^KBLaRugD~9| z0aeXReC#3(@XJKLOoWg~LXTrgaGOH_mA-hW9ida&q2h?bve4QR)G5Sup%QQRzPTL z_Z@9*MQEM-xKrhH4-x6?6-wh5vwR zmTIF`dxDttMAJ0`#{M5=OaZdJ$Hs*gP;GJlhpKebba^It)UJ)}Rgu03= z{6~-o;2o)On6r_#ilmaM(~vN2T3ObZ#`ImSs|5fPg7;N}-vP*HB*^P%Z^G1-RX8xH zHI`^(cJ(`2*XU+~O43^w+SFecZnT$^x=1(o?AS!A0GHXz$60We><^B;3fZb9M^F07 zb#2rYkm}|DS6VkOXPSxa!3+!JkjqFqYT1STq=3>_Z?h5dck!Iq%azF_+uRL(xqS?| zm&nyk{;R|l`THn1g<4@I8#iGh7p!{`U!a-~s3sG3nvH1Rc=&Hn)i;QIlj?P3a_EYZ z9sULg_W6m$IA&tSsh~aAXD*~Q9!hDM>3!YFN{u>NCd>uLfKRk=l;8)Ue-s4$9Y#zN zKFwB-YX)r4hQ(E%ko{SZ;SZZDJ+Y0FTGh5E4_i$iq%;|b7M_ht;ZeG0%E&#FDfdh|c49rkz}|l#Zq_Mw_J5RRRB@3kyo6YK2#jE} z03&4&76=Q|W1XxU_Nb@!s52e+uwrJQ%%aSov{0HTb11VYZIl+uJjxu(LIP}&$?VvU z^MH{;a>OD5ARAfo(51{OyBOOAV04SiNRwTA86cr4-BPz4yEQ>cOsZ#AVt8?`Uj>o!W(johE&DW|vZxX$i?MEeLJ#zQ_WoC^`t03w_PyBC|%f z{I_6UqAWLx`aeuGVQa%!-huG@R~SK;0Yw>P@l{JY0s&CPqkx7+Oa|cnReLez8ddI6qfzS z#i5()@#T=Wsg2z9m*kspH{CgzDR4Sw!GQvu2{NQno1%z?lvS?GLo(b8mT|J-bvk~) zL;(Tb4c8Z&bciEN1Wcq}GVmc|i8!b2%Kj!t6lkE0!eio zs^Ed-F4>fwha#7p6428a^c0T1ct7M#YOe8+2c#??j7*eM-oft?p`C~$@h&~Haq&ow zEpx==p>*m~TdwmpYM*35?<4o4ENCt*aTq2wgwb%DTTXuH)CWf)5!*N_1Nv9=7yd1X zq9OwV;!w_W;1B7+J)4GHrb1YS)+O1t0!n#Pl08*`TBgjK4(X}(W8)Xv6QfYjCZIuM zeP+8-1-NHnbjq5jE&4;A1wVD0+i~4C>EugZO&*00cPGH6r9s z6yrbqI%Ph{t$;DHj%Wss>4=}yf-)x@;UT`#)|}$&G2qdWt?w<|n~5@%bK!_DgFJ|G zr>xAKh_lixDzloyh6D)4vb#F4j4~~Y@~hYaoLXsq6k+1YR^naa?t=85;0Nckk%V(N z4KgQ@3!`>Nvb8+lrj{QO=@N;FNaJI~`9wY@vIBzSoF-2C{D42e9ZmxF^P$|p{T#`D zlY9fY5PYt|Hu>*T7rP)yt@Iq3pBY=g)h?&n6{$S}?JBg9;|4#2_9(P}3XIo)?lks9sy%t6c40{S zV`xu7`!Sn__A#mbLaKfINNsIMdmh>^u4%yX%R{4|_{`CV^^HjRc<4YHoAr}FG?6h` z$d6{;P>^fn*(sEiFP%o2hb^92;0-K$@7qLXKx#$;nq-_kdNcC2X7nn=|3RPh-U5fV z-y^~AgY=Mmf*}9vOJ&u7^eDn7UDL+7M;O?<3vyt3}^t-{gNxgt7!k2LmRLDW8fWbMGIB&|V#l zol1(!S^ zwA9kFH6~@(>y*;aWga&%J}|E<+zu{*)4-jk9XlOGVQ-F)%fSiyEEH+j!C#gi zg@7WET9Ltw9ra^;mV&2c7OvmMo3z$L-wnei+kUtIB@-u0g!nU!5GPF0jJM%6n%-j5 zpPjiWze3DFW#%rOeFV#QXWD$`?jrIeBzU;A<)WGt#>}AhcYf;>u5~`+7ehifG{mvj6)3C0`6cTV;u`+EO2Sk@_m<~jL#PI 
z;^YXze(%@kuibDzc>l(&M+W~3lGD_MxrPrHNHISl$-(E~)X9%&>g4i@TE(=b>~o#& z4`^T+B>$4Qzb0~M9#s3up?Q$(bDhT?4Lm~Rne(99zd1Ayl6|i8_z_KkY~|VWpxU1v zng_`~*LnPyrZD=M^Pt+F9hwKpYU=fxcKzw=EPdEoT94K_K5TJ7p2Bq^Wbq<{q`tEy zw4lRpl8_Oha3XTadDd(Ypf*T8x)44k`wz-#jsk_tFH4Mo;Rd#!>DKmGleSuE()Gu4-?_3F2j LD|XQ~lmGt!_RcX= literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/efficientnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/efficientnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef17886168f5004e2fe1d13f66e2ca6fa953f6c0 GIT binary patch literal 20490 zcmcJ1dypK*dEd7rZf6tyP4-KU){C?$YduDaZvi^dZ-XBYk;_`mq zu`FdNyJi)6cWTy>U9@e3bCx2-NC-!lB93KSOR-{HFp-+Glqe>alEvgws+d|z7t?}| z0-q^nma@g{(ok_Id^%Um$y~SqvW@#QFpt$b>Xhd z_NayXj?&$N*%o5931&=H+m2poi4Fno#cnG@;I^ht)1X??HbiuiM@95rKD0 zx<}F}N%u;+PtyI@ZFK;neXlyG9>u#K#=9R=hw$!0Ze(E>-aZM;`_$vWJRV{m0p{n_ zVPFo2nBBk}QAdF}8e;YUb4(ov=6Hyi0_KD|3Czh5vlp0CY8sg75VH@MC)AU`JQ-s4 z1M`0M6fjSPm;=E4ym}g#r$fv^RaK|e3})a_wV<9+9{}{2T2yD$F9JHGYU+dPuK{|W zT2jxe4*_~y)m2gb5}=<`4Ruz17|>z0tX@zhKu6Sh^`a^RI;ymqRTV(Tl>MAF;jNOU zXKvU*>Y1`GFCFuJJrND!CmQt&QzwJe@v>hzH`CBdLHy}z-7V`NaZ)#y8!P@qB-n6F zm6!eM1^3v6`DYr98Xq`aeyIa?u@g=(R%?`1NnM)P<>hmwW$pU9T&)K?)$2k9<*)moz-WR>AaC0+L2AX8siDphJ_&vU(C z!^Nud&y^|-x5EvB4a!{}Kn)p`s$Qsi4IQMGs`ZkfP#I0*@p|2@_@@2T@miy@cudcG z6UiX6=<2##D|ywX8>IPW?Jm1zKNzakmskAKT(#!A+6!`En7#R|`_gi`uGnI=5hSXr zj?c%C);+*ZdqF_ zu=jNH(ZcESa-nx=3&J}KWSs@SQ4lWZqL+o9S@I?e6VY#@ef++6M85~O)mM>x`*%LY z-xrTeL(=@#plfXzJ}-K4fy(z5U`J~{s%sLz&yJTw|Vny^yq3H6BTyXdq+RHd@0!2izt~sJcYM=qe!f%WB2|2IG)i**f%bICCVz{FOGW#mk_J0 z56Jbp-H!U!Rl60vM#v%gwwxA7xMj6$L&jH9?N}??ip@D7;bil0;R)9-%yQcm%6fi< ztiUUjE0u;;)%tut5o1>k5^|!sP46i!?k#zH_LST`rRv_PQe)47lIQN8oZ64VDJh(q zD!B(rRTMO)_VpF+FL_}h3L5*MOmv>y+xsL68jp4hw`&yQNxl=!qM)&7&s68pyYxOO z?YtBvjXjU`6t8udeL_iN?^LJlZ-4fQKW~nH_J1Gw_FFf;|3{~1pEkpu$Z%MeddC8I}04;mLYGd-fTnlmsIlCP} z>a@A7oyV-U)ru}Z)WRb9kyg|vHlplp)+Q`VE3y^2QR}jUZI7jES)NVV=y#DhfGp3EXxbGMlw5h@F^hl-iHd$A+y 
z2Pb<-`q(M`7+&siFIrCAj^Zzlza;*m_W0XTCkseFkJ7%`#@=4r00m_FXWO@Z=PH>f z=Ixp-Gi5jT6`uXs=LXVQxWgH|BZw}Q{c}OI;#O;55(%T&c!r?33vV&du-M7;8RSun0fvGja2zna8v-W)!^ijvsK;u&Octdo;a;s)KJ?|u`QSa6<>7PiJp99(P{i` zQ>qeMu)|nS#kW|fmuw}x>#d}Boqh6SKH61Dwjxlf3#GeIrVC{WsiBs2!B#o;LVwRs z;Q1W!bw@>|4ZaI+y=>3hYWPYL6Q6G$AnOFP3^^xwSz~4X9P(6YA*8|ksO#3bOS?jR zvY>x~3l@LqteSk!gnky8nZWi|F}Idx64;A@tpmFn*p0w;gLu6mnzG}O2j0VK_cFPU z$^A@rGI@Z>Lrfk-0!2Mt^{RC++IqzeBIr^O^S}@Mz|qLp+`2BXve12O4o(Rl21r)K zT*(X|h%}bHAkl!oqfz$)r$KFJ-hk&eIZI9UkGboVcAkK6gzn)e8WhXg*kUdw&dOTL%D zbpn_7>qrzRb^(Hs9CyLS9b$3;YYckg9jiR&02@axf_nn@DDFwzW8>C*ylqoyTZl)k z2>JE%Dz<3rS{?d+J>RmgCRB>02Ml+<8AYNWxcI!%LCx|XIs&%O2F%6P#dM|5eT(^dG-X6-bi^Kc+reVy ze-e#t<#OV0aM9Ljp>NS5Xs#=0cO%o!a^@`hRG2h0Xc;~FN_w$6{YIJI{3 zTf)h}5kqE(d977FpF442b9&UJy=Avjox5Z3)f4O@`macS8Q%oeXn35Atq9X zHUNzodV-iDu^Ei(qDy_ngKs`6?tnxI8jI)p)0(Uty4pu^3z92v7R^@6UXT_#D3#&D z4^qSkI5@%CYQ5@L%e9h!Y1#ds?4OXjO;XQ8-PaTiL3+*4b`zO*2S$0@H!!rG4{5!)|pbRe96^6KuwQq(L%R# zw*C!3fiqXRBVE;((b_;i^%dd<`UW2#`ZiZFeR=~mdi(SWpP+>5^NGZOAaMv`90x$J z2ZXA<=;-a3Jm!nv@(vD4qXT0JY9#NQA)vwGBh5`~RuKaw)VJu7oHSyR zTGfQDpW`hSB&$pF;;$6dD99KS$9OQ4stVHz3QmxO!_w3m7<2}!2&++Ef?#Gx$A`M& zjM)0O_>QE0FK(;LNHooW)km4ouvpCz_bEcuL{O7^W0mHo{yT(d)UNJeGE5vdo#s@b zCWYVXuM+Y12pwZWp}x9-xa)+dpQGkp!hEs*T|z(4Aha zmZQ@xs~xF73DpmD7JA~KpwZvvL+PXW4dosl6BU^UOS%G<)sDko7iq;&D*=e6d*YhY zif%(otqABZHY2{^HJxn2hI)OjyzGM9;wEyZIm?Vhod$2@(=ZRh}>dnIv#Smv&I2(up@f$rFbET^6hw*x6LP4XL!_T}vfUnTWfV+7_`0&@t|IlcA8WzTYr({8 z=c&|};;pnwpw{PM;^o0pBA4ki+t42A%WrItwno}xtfG2&fqZETG$M)dC? 
z9|e7kA^sC0V_*Lb&NGfFZj`ODTYyme7Je(KRy8dMlp z){6CttwDozrxge7K~P!&( z;IXIQe^N*X;r9>C96SB~6Wm(!u(6jt)001BPclYjFmnGBz+fg`z)g~gH0Eh#8UDgA zu`cu`^X?#0MtDJ_ehQl_+*zr*>#9-(;V)t8ntcyq424~o*c8CZs?7yC6XdF#Ducu3*IhTo1pyf~mFAg1)}<$)tMjg)!bvrZIZvd8gloZvrh z<_9UFPn{(Bb~!m0lpb)J&QK*`x{LNuFit??>Z5{nu*3END;A;DjOikrIhL|M+DRh< zOF9R`AGN4aL}&Du@fQ6DO!z(o0s_KZx4@q6Dg7ES(Bl4-rC*TJmj?Gi0{-%gWvxn_ z!#wI(n2R)K5h>=YU^j<5qYrB$h9P8&2do+WADK?i%*@<~>t6<}e}&0Mm~d(IH<1KG z;mTE99P-SjlKp+`!#IFs3@o+(?+3Z}Nu3Ic`+n2``0MBU8R7dlXeE3f zhpk=)fp+N;JM<72KWgAkl zO%~EP(tK-!N(eVi3O7y8z=ir}c)zCo)w6wN=gOx2ItyeE3wNc$rvE#VGx{jOV@SXz zmfUh3o9RMXSLt$tgwIM?KI9aJj*{J|exeh~&o4DDKzo(vO$>B2#B`bx>>6JHrThus z`yD1i4P0d*)*vpHQC+M99b?BLrP=z#h#^)Prf_l@E7`e;;D=n50}YbocO~%0AU<2? zU4M_w%g{LqK}sqZ^9(fz2osYzS?i{Lm5&KM3MsOuHIgE=MJu0MFBV2N@xx@+ga_yV z?EEog0Qj}$Xb**TMafN!gG6-#m-jFdFb2f^z&@Y=8unql5@zFf@XvuoJZI?%JOI;O zhAi9v`#|WwU?M|V8xObOr#PN8085x29GOmYV{Z>Tn>%ZE!wYQZD@Zz90y}(WMt_Uw zwdP-CR_2ULyvTQK2e8ieHJkbSxm`jG6;$f`iSPxce-p=L6Eo64#w8SY|9@K`Vo`v-h{0o zo8-sD$|kvw&`u<9Yp*Pu-NO~0NUUuR_y8Dk7l%?$``uR8qv_9ZeF|vEO5~iJO=a3p zErN;iCzNYDKWc93pV;-u*Fi&^+!MHD*-5@Kd6001gc)o!=-vXR?TCg=5fhx$Xxd?U9-6JKHDy}!R29d0qMCj^32&XPP`Z%%5W&i+syt9zTl^L~Q z_hosc2m~z8fo#_!*esqJ>JTiZ1L0|NjEdbB%~5D}56JQVp!A&(Zq5+yPgz|E_Z~vD z(~Z{qVM23EIOZTG4q_7w5*iS(AcUP2xJ@j( zx#i|}zBBse^nQU2u>)QDy-O2+hv;|s8QoaG8M<3jZyaKBlmojkM|SI zw>X>IfhWPldt^7cV3!AO9+;HB8i{C!1GyD}{DeIT31rjY``#IOaH77+X8OoulP3NS zkw@aWQxd0`sgRL%euU|B5vIo2Y(H7xU=L0^fayfx+JKXQXCdJj(QWtRX0p4$*{7KznwJxdNS34%{~5lGas(P`gBOa#p?tviD`6L6-`SB zW5l)uk|(JZ z)YSUP8ZtLj8`~*5rJy=S8`HqjH)c1c!2&^MOE-6)&% z^DzvMcmY(-kND}(n?^el_TUwkXGFRiMU|em-ZC5oSxD%w`Z?wJ!)TQ~i{q#kBNS;L zVSc^|3T_%U=-QLqcpLnYZs}mF8--KD%Yf+)9SqU+1oFhFVb%C9YEW@CLYy)=h@V5K z%X1}gZw@i6MoN49Nm z&E{wiovpud$v(Rq_jizK8NZRyr0-qZylWmL;6kraFK!{Gr%?jUFx={(Hv=ss^>2ex zQVVJEMHv{r31G7DLNPz2f8EECB0{)^v)1JZm_+{S2H_z=$0874UUx?;$^E>CA{B7}bJ&dHGy$IxT;sHi5i_7+nh>9p(Y2B@s|B>N zN(z(1TLFA1BF*5xb(i#Oz~cZ;znA5m=>4IJwvlTrdjIo$!*`fa9qtvC!Z(6kss9+V 
zY}d7&DlVQ^`FQ^(y}+f|0X+4zK^fWIjz4UU_0H(}y9_O@*{bVk^0!yhuCOC)0(!i- zbvny8*&(dko1mPj6}B;;qY;0HsA~AsOV~;ke;VJntZ3Kk_k}QC%ZsGoQT^>FEC>e^ zu)@KAz%ViqwxTJD^NCl5jQ~b~tDV3F5w?)Tp{8db08Iug<@3kqDJ1haX_09;ClO})@Z;@lD=TlvcHfX) z=)57@^9C5V3z3Bl?V;9?{%&gs!Yu*aGr7&e*=8`bs~c6!A0-ajC0wii7~0LiQ45!C ze+fcZafIjKb_LVJY3YUh)$vwN&d4QX3<*4m5iCX#2Kyj;hbxN>n7rb&vlyS0hU3#1 zU$PIBWdB%k#@-Bn{jbVO~Q=3{WF`4?y!Yz3J;OK2}{`o z98NYj@Ucr$A+$`WIK1ZW0t;Kdq1)oJ5>MS1)z^a6HjbOm#acryGr% zStgJY4=(qmifMB_i{O)F5Z6)fCad(#WFiZZ#~>Atq$8{)fh@;KzZsA7_*EA9n@gqU zoAD$ATs%9JgVNG*h7iWzYtf%ZGd$ML_T%`VU<7a+!Li%bhH%|Hz-`uNx>Re(aZcb0 z{o9?uSm(SQZIXqE@B-K&E$1?npYvN8<}wyZRFfKd^o$+I-eipE`&dbY$C?EBCf@oD z&S|ePf-|1*ZT4}>E)TbFRW z_#RsS9@;Rz_cQu9;EMP_!-g}scfB!3(S?3i4h=z0g+RMEEIm#(eAZYh;x`070mcW0 z?jt6J=?9}x>qvyTgTFxVab1_Sdy99-d;NBoUyhs4=Z zu!tZ6pe3bnzTxgGoPjZ6l7*S(Q16FOyYv{Q6=$-z$*cUfL;o{CW?B9@ah!Jj`-EO3 z^e+f~gwVev^m#)6ijW10(Vru9l+eGHdW6Vmjo!?IF7Q_fpE~E3mA3da?ErCy1$mqy z;L$uB2PlcI3~u`jO7Q^*4rIejnyUcwpIQGCNG8S&U%j8L2wxS3{Q;I2h#TNzq+vAT z&KXwP)!{T%QR5BRLMVX*?hzi*(XS9elGG34>5gS4?-2(1G>a%K5P##Bz9o1|jMNcD z?+q6J8WVPSJx=()FhyP+g;tynT^d`_x5?v-ua2Bh{$Pk@;p{cxeb(#0I=qgYt^?!# zqhyAL15CHKlaVaBp!s{NIo7)i*Jo*DXI*QJ>j34V8|%N>%|eP@i0a*_1xFXHF+Yxd z=1{?!r*cHg%DM`VfLLl_bpKhj8`2=ow!c)x_tyF$E*`aOvnaBRvN97apq~Z@Y>Y=r zmgSaK)O#_6o;W|xJ)|K*z!>{JJnwSF*iG!{)F?^&Ph4 zG-7WV>2MMNPxv52o(9Xn86QAAg$ox3{DaUU@!i4>9=P%yghC#bpgRs<0h{!36po({ zh7RWGGdSTU=Wac(rkYQ#{ojWI57idJPyNal$~aCrTXQE1&&Vgb1q7rEFPan8FBb3- z;fu3y1|n4cqC7W=6YS)>CZHi|16gs9eRiepSC?G*1`z+8Bfa*%ASR#j;gfawnh*an zL+|2h^$V&XUy}c`vG&{nF3|`Yx3D+*5a8_J^7;|>kE0CYi#S$-?=p51(?4Ws^8`mZipH&YXLmYpMPm+h zn!GJFL(`==Z*5$^i571-?DEky?oogvBsV48#MycH*V-#&l^jB^R|6s zjXT>n>L{jf8*ke;mVV~8ePfL~+c#=Wrf*~sx1Kka{?ps`jWzCU->Ao#zKz|sZ!G=7 zZTrR=IL|jD|DM78gARKdn&;mj-qwH9+^8*103Q>^KQlc|@GC&hoVnBBaTD@Er=h*^ zWqaso&8D)||6-`@Ae+WsCsWfUe z1c3R$rmhmb4Qx0VlmCV>x#CxA-lSK);EHh66tzbD?kBTuv4tqU?8HZ(r4s*l6h1KG z@hJCR7j z`pF};Ff-vAvOQ%V2*IrhQ-Kci^~uN0*Hwr4l=m$p<~!99D9EEwBjnG?BV;k~`^)%P z*2!n{F?-vVG)% Zkr|}FH1bq>sH^$CmF~1?Tki_|{{Sc)lbiqm 
literal 0 HcmV?d00001 diff --git a/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/ghostnet.cpython-39.pyc b/src/PaddleClas/ppcls/arch/backbone/model_zoo/__pycache__/ghostnet.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f818727953cbab85aa02c9959354cbf5593859f GIT binary patch literal 8961 zcmcIpON<;zTFzHxW@SCPTrRuY)h(H5+`?H+b^m>tGD9^11po?heeN^Gr{bXG-n zRd#1(l_Rs-?pC!Hre|4T)eGWrV8x+5AW#G1yhn~8f#6CUU?dLE2#Gx)v7VJM-yd0T z_bhuDvQ>Xa{t@v_{S@azh709f1#J@$H2=M@r%!^ioz78hDtsC zuhvz(YoXfF>YAGJ^o~(ChUHAh&=gham~~5X^ic2Eb-Ra>E!A;$v47$r%*4* zawaTx7U~O~QoSVK&G1C0TrYP{)=%QyVm8Y%=MVLI#Xsd&4o*K-_SE{~+X~CE{09ol z`^Lu7o?br#PJtD{DGoVj!C7D>a7shYIdD#}GC1WS=R7zkSp}TRkaK}uWT)BU2WtHy zdyXx!GblaBs_ZN~hf=~ql>_Vwi^ZS(X)CPGjA5{R#mS=2RVe@j_cnZYt2 zWa~OJne~BEH@>A9N<9N*$%-85#kI9t-nP$=U|WhXxF7b{nl{NAK3V+F;iuyl|2jyb zB@T^5; z*G??-G81E9GkZ}TV+vrW-fC&~ITb@~LV3Gzuce{Z>EpxSWgScGuTq zKM@(X<9m_Fx?8C6qNZOnMLz0xCR5VAW+EJ^*K!nok))M6-7QQ>_IByRL}kbs36k8J zBQkNn=X1V5y=>Q|6>(ioo5*RsIBhGY(S!zY(jIORBj+~AMYs{H913E@ez?vTsd|aX z86szioF_ucmVTwxh~&VB@I0>D481sZ-9zQ$`iGSUA`ClmsXcw{v>v< z*9_x(9&fhqHN56#qZ|46q-$_@y4`y%aslub%e`&>(-^&mU;Hy5id|H-@t-40&Qn9v z{@ZqJZB&ydj!M*2%^&Pkru2V#v_?%@p3y_=MZdT3i)jf4%7HRaAFI0>N>F@R(x8m$ z8g~4%Xu&Um)O5a#f5Hr+UO$P2y3G;Ul$5+oH5R<;LyxmB;UzXf6xG4s&dKRH4cpWV zspu&=fu?JRCT%Jyfa?m!b+JPIkji=2ebD#9v_>uwe+3;xVYn+zKMbJ;WA&V}OMEz=-ArPmlu5FQGca<> zz}SO(8abjQi*g2z#N1bzwQoRDZYT*7N0J~RGf?Lwz)g`WlM=Es67td=IjKAPSIivG z_pf4Ku8f=#uYgt0f{{9aLsi<%p~f$uBDHPDT^%(?%hh4aT7mx*HF=!~sXWhsz%x+L zW^T*tVQ*(VhtctpHca;cPdKeV=Tek@;|iA@*HQU zG}MLTO^>EdpDIl+L}6ljiMpW;G={BjX-wbOq%nd6W7HG<0Fj7FXrGZK1MSU$L4gq4 zuJ6*jh4)E+a1KamjK|t;WP=3j%E`cm(F}v0$o8Yr2H2f`m;}9Wd$j!I#Dqb+D!ZrMbVR8sTb1ig z=G??DrumW6C`-rTs3ny0>dwMwRY%T4<3ERnYy9(|Q^F>OPpY+sUrg3$tHp{`HX38* zkxmMnS&ZRGgC}f37z1H^#^7@zCt=OI(m$mRjONX6f!tkdQWP3J1TyHImOLJS%-err zU_-TmaSryrr|xC}J+icoOG*VGeAh`#Y$5D>U@~=J@#{>BG)BN71tAt}-9nq}z!_xF zB8xWdaU1dp#y+*Nr8S$WHOoD$7l7FSRQa}ymxtDm5fbYq*oyD<+%Cd!YG~QilD~>i 
zla?&>AWHZRyh1kczco~BR8J|T)4ks-3P~w?1UQ1OZ zKvFMAC@=y-^?c+%oCr9^wtMIllM7O^MQB)QY{e8*Ve3z9dlX(roSn+lrXO$JWZ9%{ zWH(a!#Jb0URZp}Hov+Mz ziWfN5K4WL2m^f=^rnED-J85Tx>8G*0B^Km#uXU3o#Br**ISbzVGF%^rCy0RON|*qX&m*pVge$QhV?C&{4} zhh}iI`@l?YUIKp{6_^7)fIiFb)93}V$xS`}5F_fFIv0?v;*TWbG=sJT|5M3DSw}MH zkLrr!x`pYZ!QJ|T+aky~0!UZjZY7*`u?ACcc5EKf>(?tT^Lt7AVG#S160f6=(B0Sv z(xYf8gX7KlAy%Pzb3=@!*DttnyUUX%jP{rj>cvSl0rOfZ^%$fgsmHL~(c#4HV{Wm} zuhXPT?z|@;%@W1gZ%_S8aWXKCA$`5DA*?~TY_89$C_g)4Q5f^&EbS?m2!yaa@VWe(b}i<|wl#qj@16WcN1n#LT}1l*W)RI*OC zoV22n`DABl&T@`-SY(xBhqZ;5X@|7|J%lw}1TTdl(DV%)0q|}N&jo>;5|T_{oFkWz*0)DW|r-Zfg#NjFQHcU}4V z`}Z=tnO$?&+O>C~NkB(Z-T(+^MqbJ#J)De^!<)kv;k%7J6{(m!5K|5jAp^{0Pxx$U z-(qJFmh)(zJtz$Fk5v`tEmri7csJBegw`W3}wftBSM58t@! zuygpH6SA$cLElSlk?xc!pmK6@qk`nbMg$+y^aXz@P{BrpP`ZzJ?!;}xJ3B&xQgmW0>4l7x2fuxvHqI4gpsGmN+