torchvision-0.18.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
torchvision-0.18.1.dist-info/LICENSE,sha256=wGNj-dM2J9xRc7E1IkRMyF-7Rzn2PhbUWH1cChZbWx4,1546
torchvision-0.18.1.dist-info/METADATA,sha256=YxT0dITCC_3UTAsUzUVOJynTswZ4aDupfgT2mTya_9o,6613
torchvision-0.18.1.dist-info/RECORD,,
torchvision-0.18.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
torchvision-0.18.1.dist-info/WHEEL,sha256=TxwUeV-3HEbjjXVQ7qnUZIKQ4IinhZYV_rmkwq84M_Y,102
torchvision-0.18.1.dist-info/top_level.txt,sha256=ucJZoaluBW9BGYT4TuCE6zoZY_JuSP30wbDh-IRpxUU,12
torchvision/_C.pyd,sha256=nEWm1ESTojCisclpCrToj8wEXlolo3-JYFMwnl6H8bo,642048
torchvision/__init__.py,sha256=GH5ATaN0Zkz9Lh2FAfjOfqtPR0YgegQpxmdRSca-VKo,3471
torchvision/__pycache__/__init__.cpython-311.pyc,,
torchvision/__pycache__/_internally_replaced_utils.cpython-311.pyc,,
torchvision/__pycache__/_meta_registrations.cpython-311.pyc,,
torchvision/__pycache__/_utils.cpython-311.pyc,,
torchvision/__pycache__/extension.cpython-311.pyc,,
torchvision/__pycache__/utils.cpython-311.pyc,,
torchvision/__pycache__/version.cpython-311.pyc,,
torchvision/_internally_replaced_utils.py,sha256=jnEH2UiEKQCUf1Bq2fITPKxxg-ZySJONIVoniEh8hGY,1439
torchvision/_meta_registrations.py,sha256=fvulO98RBcEUh5owNUbKFA29w9WKwkscK_Azqu5vjw4,7437
torchvision/_utils.py,sha256=kcSn6P3Vjv5QPgHHNmNu59Mh-NLb5MDlYxvDkNJWrOM,966
torchvision/datasets/__init__.py,sha256=IsRXN1eaTyE7Wb67BtBaDgZkAukXqYiblbziJ4lD6aI,3733
torchvision/datasets/__pycache__/__init__.cpython-311.pyc,,
torchvision/datasets/__pycache__/_optical_flow.cpython-311.pyc,,
torchvision/datasets/__pycache__/_stereo_matching.cpython-311.pyc,,
torchvision/datasets/__pycache__/caltech.cpython-311.pyc,,
torchvision/datasets/__pycache__/celeba.cpython-311.pyc,,
torchvision/datasets/__pycache__/cifar.cpython-311.pyc,,
torchvision/datasets/__pycache__/cityscapes.cpython-311.pyc,,
torchvision/datasets/__pycache__/clevr.cpython-311.pyc,,
torchvision/datasets/__pycache__/coco.cpython-311.pyc,,
torchvision/datasets/__pycache__/country211.cpython-311.pyc,,
torchvision/datasets/__pycache__/dtd.cpython-311.pyc,,
torchvision/datasets/__pycache__/eurosat.cpython-311.pyc,,
torchvision/datasets/__pycache__/fakedata.cpython-311.pyc,,
torchvision/datasets/__pycache__/fer2013.cpython-311.pyc,,
torchvision/datasets/__pycache__/fgvc_aircraft.cpython-311.pyc,,
torchvision/datasets/__pycache__/flickr.cpython-311.pyc,,
torchvision/datasets/__pycache__/flowers102.cpython-311.pyc,,
torchvision/datasets/__pycache__/folder.cpython-311.pyc,,
torchvision/datasets/__pycache__/food101.cpython-311.pyc,,
torchvision/datasets/__pycache__/gtsrb.cpython-311.pyc,,
torchvision/datasets/__pycache__/hmdb51.cpython-311.pyc,,
torchvision/datasets/__pycache__/imagenet.cpython-311.pyc,,
torchvision/datasets/__pycache__/imagenette.cpython-311.pyc,,
torchvision/datasets/__pycache__/inaturalist.cpython-311.pyc,,
torchvision/datasets/__pycache__/kinetics.cpython-311.pyc,,
torchvision/datasets/__pycache__/kitti.cpython-311.pyc,,
torchvision/datasets/__pycache__/lfw.cpython-311.pyc,,
torchvision/datasets/__pycache__/lsun.cpython-311.pyc,,
torchvision/datasets/__pycache__/mnist.cpython-311.pyc,,
torchvision/datasets/__pycache__/moving_mnist.cpython-311.pyc,,
torchvision/datasets/__pycache__/omniglot.cpython-311.pyc,,
torchvision/datasets/__pycache__/oxford_iiit_pet.cpython-311.pyc,,
torchvision/datasets/__pycache__/pcam.cpython-311.pyc,,
torchvision/datasets/__pycache__/phototour.cpython-311.pyc,,
torchvision/datasets/__pycache__/places365.cpython-311.pyc,,
torchvision/datasets/__pycache__/rendered_sst2.cpython-311.pyc,,
torchvision/datasets/__pycache__/sbd.cpython-311.pyc,,
torchvision/datasets/__pycache__/sbu.cpython-311.pyc,,
torchvision/datasets/__pycache__/semeion.cpython-311.pyc,,
torchvision/datasets/__pycache__/stanford_cars.cpython-311.pyc,,
torchvision/datasets/__pycache__/stl10.cpython-311.pyc,,
torchvision/datasets/__pycache__/sun397.cpython-311.pyc,,
torchvision/datasets/__pycache__/svhn.cpython-311.pyc,,
torchvision/datasets/__pycache__/ucf101.cpython-311.pyc,,
torchvision/datasets/__pycache__/usps.cpython-311.pyc,,
torchvision/datasets/__pycache__/utils.cpython-311.pyc,,
torchvision/datasets/__pycache__/video_utils.cpython-311.pyc,,
torchvision/datasets/__pycache__/vision.cpython-311.pyc,,
torchvision/datasets/__pycache__/voc.cpython-311.pyc,,
torchvision/datasets/__pycache__/widerface.cpython-311.pyc,,
torchvision/datasets/_optical_flow.py,sha256=exA1SfgHNdvExeOHrCoMnQUfhCo-7c0P97kkqYm2dRE,20141
torchvision/datasets/_stereo_matching.py,sha256=uI3EzvxJgcbI97sXkiqwUd_UafZrpL57QMfQ7kKy4YA,50307
torchvision/datasets/caltech.py,sha256=5lmADI0KQmwGcABpMmaivr2qY5NoOgc4HhDF9525s6Y,9175
torchvision/datasets/celeba.py,sha256=hES-xHtqgbTagwSUsOnArvKZ-YqwWw0KNo9wvCbsUEo,8664
torchvision/datasets/cifar.py,sha256=2HVkqRdWeks6vHPNlU1K70rODmvgIAwODu5RB_meRDs,6018
torchvision/datasets/cityscapes.py,sha256=8aAmy50eIgLtlqi8s7fiz6kqusMudTIv0EeT8jLptKw,10515
torchvision/datasets/clevr.py,sha256=vliWOSDso7wRX5NtfwWSOK4sL-h0A0UP7tctAie9o3o,3548
torchvision/datasets/coco.py,sha256=ffMMWD4lEElgMPb1V967OTU1RiSt2MPklgh3h3vGOjQ,4289
torchvision/datasets/country211.py,sha256=_iKc9AaBoGm9vWTd9Z2IbdB4hD2KjzKo9Xsa8eDvlfk,2494
torchvision/datasets/dtd.py,sha256=xCxrh3Prnz6Td0D2zp0LY4NupqUN9rihTkb1g8I7mIs,4119
torchvision/datasets/eurosat.py,sha256=haGeLUypJFKodQJAUC3MxyCdd_nARHNY_nWpbY2A6Y4,2172
torchvision/datasets/fakedata.py,sha256=BqGViPyIRcvdBNpGhsPl_isrxxOTtlFLyKnLxzBWQP8,2514
torchvision/datasets/fer2013.py,sha256=OxPwEQLDHuq3Dvl0HjDDFPIyTdsgpuz9rIoGlPMclbQ,2881
torchvision/datasets/fgvc_aircraft.py,sha256=IlkE04918JZ8of8bwfSPtvfkYafrBIUGUbpg3isfQos,4741
torchvision/datasets/flickr.py,sha256=kRzv6HJiz8TDa7_SR9gc13S1n_4loCz5ike7UhV4S3I,5598
torchvision/datasets/flowers102.py,sha256=pkBqqDqOI4Ef_OB5J8_P8s0dqq_YldY1kkQ0hc3S0No,4755
torchvision/datasets/folder.py,sha256=xS9pf344r5JKxtNYMdWTvCR9NNgxGWfU4Ow5xJiLTvc,13243
torchvision/datasets/food101.py,sha256=jhWKGMqqkIMDHq_dQocQOgYWHdZ6pYKsy9cDB7MPiI0,3845
torchvision/datasets/gtsrb.py,sha256=TVMesJ3jzVk1tg4ecJyiun5lQ0PKh7lMZWLkZbkVbUY,3888
torchvision/datasets/hmdb51.py,sha256=nAfL2TmZMNAuxv6hN1X3jNP-DmBEF5ls5YnjHo64TIs,6123
torchvision/datasets/imagenet.py,sha256=ftqZ3qfRBuL-fAAXFjNwKzN92KdT_vS-PUqIlnFUMfA,8910
torchvision/datasets/imagenette.py,sha256=1JxKiR5ItZCuOZooSidy2ma33sgUM0YFXI1vbHSX4l0,4560
torchvision/datasets/inaturalist.py,sha256=RWD3qH5_411m6TpnieUuhVGuWf_Iz5qIvWCkuXCVJAQ,10403
torchvision/datasets/kinetics.py,sha256=SMgnLJ6qaqmdX6q9GyFgSPfZij4PdG_OPXLKoV-PuF0,10638
torchvision/datasets/kitti.py,sha256=q0otSanUygTFq4P2T3xKft1Gv-BPQXw-xNNMfOmDOzc,5795
torchvision/datasets/lfw.py,sha256=3XWdxEt1D-M-zG4psvq8ZCUvKR1ksQQ0iXIfo80lprA,10816
torchvision/datasets/lsun.py,sha256=pOPWLCnQBY3lbUzCWR_YsRhFZeEB2qFoAWxnuQT_l6w,5896
torchvision/datasets/mnist.py,sha256=Kyky-ex-AHE5NXezLzqb1Kp9zTA5bqS7mI0FSfpYhfI,22277
torchvision/datasets/moving_mnist.py,sha256=iuX-yNmki7bypxNfICobifZhadtJKFmTGvFd40JPrIw,3740
torchvision/datasets/omniglot.py,sha256=ysZzt9vQ7eD2ikqQGIEvfRVckusg2JRl4d8rQCF0PEU,4254
torchvision/datasets/oxford_iiit_pet.py,sha256=aeTY9s-2D6ueX_CwppAAghHlkTdQLBgyOoz_kRSzLWk,5215
torchvision/datasets/pcam.py,sha256=8tWbJcxo7lKYTToISFpL6_OSbxeYEqU2QsaY6OYjRpM,5419
torchvision/datasets/phototour.py,sha256=yEQ_X1bDJQA-70I1x7PTy7F-mCn6UrW70ubU-z3AU8g,8271
torchvision/datasets/places365.py,sha256=izNJr6E0mR3toIG4UJs82Gdc1Fh_a5FJKVa8qVpezT4,7430
torchvision/datasets/rendered_sst2.py,sha256=_guhu305oAtxCH8KcV4rUwP5CUBucFCYfx--lpBjRK8,3683
torchvision/datasets/samplers/__init__.py,sha256=yX8XD3h-VwTCoqF9IdQmcuGblZxvXb2EIqc5lPqXXtY,164
torchvision/datasets/samplers/__pycache__/__init__.cpython-311.pyc,,
torchvision/datasets/samplers/__pycache__/clip_sampler.cpython-311.pyc,,
torchvision/datasets/samplers/clip_sampler.py,sha256=ERp9c0O3ZGQNR3lLXvZXWjfJkXayYym8bhYfOm_MNKI,6416
torchvision/datasets/sbd.py,sha256=6gtgU3ip9TFGuacBpKNGrVJcpe2l31PXD6LQORFh0Bs,5388
torchvision/datasets/sbu.py,sha256=GWAuWzCS39wkH77_Py3EKu2ej-TifDaxGyiup67RLFw,4253
torchvision/datasets/semeion.py,sha256=DLzb76ihmjgtAzNhnV0JntTjwuWA1sf0eEcFBj9oJII,3240
torchvision/datasets/stanford_cars.py,sha256=REy9oAjVYN5jISRCLQD8eg0GMBx3MZUbYeF76eQCdI8,4626
torchvision/datasets/stl10.py,sha256=MGcfHh9vUzIPrTtgcYDKjewMjw4r11vJLJGihLP-hRQ,7468
torchvision/datasets/sun397.py,sha256=yIbrKH6CfuJ3l8yjQGsZCakQGPcbIxNGb6h64bQCDU4,2859
torchvision/datasets/svhn.py,sha256=nP-62KxLkLD6n84lw5lgyblHyRFWC5NYiBLoGgeID5Y,4958
torchvision/datasets/ucf101.py,sha256=zM9EY6Clqdvt45PZyL0dZvnlLn-5oyCD6v924bENlPE,5664
torchvision/datasets/usps.py,sha256=vaztMPU18onO_7tWXUWNQ0QsPPYoxGB-3a7YiA3pYrs,3596
torchvision/datasets/utils.py,sha256=RfT013BlukbKIvp9LG8DL_-FdYscGkieGI5vC9tukn4,16804
torchvision/datasets/video_utils.py,sha256=gUazc9gv4wgFuJ1N47e2c4Wi86HWhR9O09HcwVraeLA,17632
torchvision/datasets/vision.py,sha256=5N69xiFsNdiqjVzP-QP3-PtswDUNsKSeyy_FzXTDt_4,4360
torchvision/datasets/voc.py,sha256=7GhzVyU3iWbBzFyb1zdj-9xxCSLmQCdihPO_o85SdqA,9059
torchvision/datasets/widerface.py,sha256=UGvI97nsBCmJhG0KKNxOwM9ikLGBuaamUSjlyt4jwzo,8494
torchvision/extension.py,sha256=0A4efQ6V8RlQcMMtDpK74VIBHpDv4icjkkOc-EokPHw,3233
torchvision/image.pyd,sha256=QwaE9wk-FcpgqJpghx5FSZ_tTQFG_RfiywxXvaNk0eQ,150016
torchvision/io/__init__.py,sha256=md51PMqDbCY8Wvo9rA2il7ZrZ87GshTq8fJY3xhNVOA,1547
torchvision/io/__pycache__/__init__.cpython-311.pyc,,
torchvision/io/__pycache__/_load_gpu_decoder.cpython-311.pyc,,
torchvision/io/__pycache__/_video_opt.cpython-311.pyc,,
torchvision/io/__pycache__/image.cpython-311.pyc,,
torchvision/io/__pycache__/video.cpython-311.pyc,,
torchvision/io/__pycache__/video_reader.cpython-311.pyc,,
torchvision/io/_load_gpu_decoder.py,sha256=B3mPLXerJYXqiHv9FO2laRrGRlIkXii7CsD0J8J_SwU,182
torchvision/io/_video_opt.py,sha256=n5PL4hXCnOVaLDlSfLtWzP8yK4ypwWbyvQhbSHrO0Ps,20902
torchvision/io/image.py,sha256=cYqcLdGmcRsnBKYfOVvuEGrYIGq9bXhIIpIS3XuySWY,11120
torchvision/io/video.py,sha256=tcDKnx2z_AXAgBU5CCgthNGL2lMBDoMT-vXnfW8qyzE,16089
torchvision/io/video_reader.py,sha256=RhuK-KcutlD_ByPkND_tHH_QVIwDsdqWLz-C0KJc0EM,11642
torchvision/jpeg8.dll,sha256=aM-Kj2MkrdHI0gkgpHfh86_icuM26XiMu6gyMGeuKig,552448
torchvision/libpng16.dll,sha256=nPxu7uIrOxgpEXCLUyjP7rQBCghBVPsL-h8OxeArvc0,192512
torchvision/models/__init__.py,sha256=6QlTJfvjKcUmMJvwSapWUNFXbf2Vo15dVRcBuNSaYko,888
torchvision/models/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/__pycache__/_api.cpython-311.pyc,,
torchvision/models/__pycache__/_meta.cpython-311.pyc,,
torchvision/models/__pycache__/_utils.cpython-311.pyc,,
torchvision/models/__pycache__/alexnet.cpython-311.pyc,,
torchvision/models/__pycache__/convnext.cpython-311.pyc,,
torchvision/models/__pycache__/densenet.cpython-311.pyc,,
torchvision/models/__pycache__/efficientnet.cpython-311.pyc,,
torchvision/models/__pycache__/feature_extraction.cpython-311.pyc,,
torchvision/models/__pycache__/googlenet.cpython-311.pyc,,
torchvision/models/__pycache__/inception.cpython-311.pyc,,
torchvision/models/__pycache__/maxvit.cpython-311.pyc,,
torchvision/models/__pycache__/mnasnet.cpython-311.pyc,,
torchvision/models/__pycache__/mobilenet.cpython-311.pyc,,
torchvision/models/__pycache__/mobilenetv2.cpython-311.pyc,,
torchvision/models/__pycache__/mobilenetv3.cpython-311.pyc,,
torchvision/models/__pycache__/regnet.cpython-311.pyc,,
torchvision/models/__pycache__/resnet.cpython-311.pyc,,
torchvision/models/__pycache__/shufflenetv2.cpython-311.pyc,,
torchvision/models/__pycache__/squeezenet.cpython-311.pyc,,
torchvision/models/__pycache__/swin_transformer.cpython-311.pyc,,
torchvision/models/__pycache__/vgg.cpython-311.pyc,,
torchvision/models/__pycache__/vision_transformer.cpython-311.pyc,,
torchvision/models/_api.py,sha256=RJurpplK_q7teeUlnqlMuTTxAorqIEhEcuQydyhzF3s,10331
torchvision/models/_meta.py,sha256=2NSIICoq4MDzPZc00DlGJTgHOCwTBSObSTeRTh3E0tQ,30429
torchvision/models/_utils.py,sha256=X7zduE90fkek8DjukzyENOcZ0iop03R0LIxC_FuAazk,11149
torchvision/models/alexnet.py,sha256=XcldP2UuOkdOUfdxitGS8qHzLH78Ny7VCzTzKsaWITU,4607
torchvision/models/convnext.py,sha256=mML3XALGvQGe3aYkmz6dbC7Wus7ntZWr7IVihWjLV8M,15740
torchvision/models/densenet.py,sha256=Wxvecj8b09Uy2FjKun7ZbagpDmHll8L2c3GbM802Ivs,17273
torchvision/models/detection/__init__.py,sha256=D4cs338Z4BQn5TgX2IKuJC9TD2rtw2svUDZlALR-lwI,175
torchvision/models/detection/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/detection/__pycache__/_utils.cpython-311.pyc,,
torchvision/models/detection/__pycache__/anchor_utils.cpython-311.pyc,,
torchvision/models/detection/__pycache__/backbone_utils.cpython-311.pyc,,
torchvision/models/detection/__pycache__/faster_rcnn.cpython-311.pyc,,
torchvision/models/detection/__pycache__/fcos.cpython-311.pyc,,
torchvision/models/detection/__pycache__/generalized_rcnn.cpython-311.pyc,,
torchvision/models/detection/__pycache__/image_list.cpython-311.pyc,,
torchvision/models/detection/__pycache__/keypoint_rcnn.cpython-311.pyc,,
torchvision/models/detection/__pycache__/mask_rcnn.cpython-311.pyc,,
torchvision/models/detection/__pycache__/retinanet.cpython-311.pyc,,
torchvision/models/detection/__pycache__/roi_heads.cpython-311.pyc,,
torchvision/models/detection/__pycache__/rpn.cpython-311.pyc,,
torchvision/models/detection/__pycache__/ssd.cpython-311.pyc,,
torchvision/models/detection/__pycache__/ssdlite.cpython-311.pyc,,
torchvision/models/detection/__pycache__/transform.cpython-311.pyc,,
torchvision/models/detection/_utils.py,sha256=m9bowqjuYiR9A7HI7wJAK_kgFrUYlKSukXOEwuUtRpA,22667
torchvision/models/detection/anchor_utils.py,sha256=TQKWOKDFALsTmYw_BMGIDlT9mNQhukmgnNdFuRjN49w,12127
torchvision/models/detection/backbone_utils.py,sha256=BBKVxCyAY9Q8JYOLQ3oAbWJxcjn5HAbPeAcDb-5JoVA,10792
torchvision/models/detection/faster_rcnn.py,sha256=CqOFL8d206qgEvlmJrgjvcQXMZmR5u4Eg_AomEzlYWw,37576
torchvision/models/detection/fcos.py,sha256=8ffrmOs2WZZFlUCzoOV5NiGkDZ7K24CuJozvX0bhNWg,34761
torchvision/models/detection/generalized_rcnn.py,sha256=nLVj2o8yr6BW1MN12u30QuFbvtsdllEbUlbNH-dO-G0,4861
torchvision/models/detection/image_list.py,sha256=IzFjxIaMdyFas1IHPBgAuBK3iYJOert5HzGurYJitNk,808
torchvision/models/detection/keypoint_rcnn.py,sha256=XCZw2v0ilcYYkHt62pIO9SUUh03B8UFCFTeLcMsXf-U,22198
torchvision/models/detection/mask_rcnn.py,sha256=O1-jvyefS-aU6JGsUB2lgzIBE8Tht-RsApabHDjvyQ0,27054
torchvision/models/detection/retinanet.py,sha256=Erd9q38DhUGCc7NLJxKGSBUafgEUBe1fA5zaEECwm2g,37954
torchvision/models/detection/roi_heads.py,sha256=f_Lde69JHugGAaiGWh6ugJ9WUTT8f7InxPry_MZxgZY,34698
torchvision/models/detection/rpn.py,sha256=z4ezvg1XS9uNq_3jIYXzIRnsAhuGClZ5AJ5P0NDziN8,16226
torchvision/models/detection/ssd.py,sha256=QyWW7IihpNG4_SWeVo1-X5J9kDJrasd794-AHIcDxQY,29661
torchvision/models/detection/ssdlite.py,sha256=DRkN07LP7WTKt6Q5WIOraQR37Unx7XIiB70UdhmDCcE,13550
torchvision/models/detection/transform.py,sha256=gou0mGGPzLncV2ZMHkTTkw6UO0fUoJii6Dr2N0PNA_Y,12508
torchvision/models/efficientnet.py,sha256=tW6BpsBProhN6b1o1_sHSikSoqDupQRgdgRETiYjcAY,44221
torchvision/models/feature_extraction.py,sha256=uTBD0Obc42BSm0YBU7VcSKJR7HDeY6Da5nOiuGBZsV8,26140
torchvision/models/googlenet.py,sha256=AtXckNXKcmWhDyoozVsvb3VPI-odl2tptiYz7KXU3wA,13151
torchvision/models/inception.py,sha256=l9tutwO7KNVa5nfdl1_5f-6lJzQ-NSOCzXPArDILeAA,19329
torchvision/models/maxvit.py,sha256=m8Pfh7MYcYANxwxiAaU9pVmzeWmOB2jTpLSiuUjsHZI,32886
torchvision/models/mnasnet.py,sha256=PTSF4DchbFgtOchd9kgoPKCcfKH6NC4WfHa5bv4jZ58,18008
torchvision/models/mobilenet.py,sha256=alrEJwktmXVcCphU8h2EAJZX0YdKfcz4tJEOdG2BXB8,217
torchvision/models/mobilenetv2.py,sha256=eVil23yP4f-DBUhjKt73mao84zEiF5Y01wHNwh8HCdM,9970
torchvision/models/mobilenetv3.py,sha256=g1Ul1RkohPHhXfsecg2r0W7NVlEgVUc7X_uT1bxxrTQ,16702
torchvision/models/optical_flow/__init__.py,sha256=uuRFAdvcDobdAbY2VmxEZ7_CLH_f5-JRkCSuJRkj4RY,21
torchvision/models/optical_flow/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/optical_flow/__pycache__/_utils.cpython-311.pyc,,
torchvision/models/optical_flow/__pycache__/raft.cpython-311.pyc,,
torchvision/models/optical_flow/_utils.py,sha256=PRcuU-IB6EL3hAOLiyC5q-NBzlvIKfhSF_BMplHbzfY,2125
torchvision/models/optical_flow/raft.py,sha256=o9wJ3jZH9EWwJl7fQYeWSdeXPMKYOZ0Zwm-hhcequVk,40942
torchvision/models/quantization/__init__.py,sha256=YOJmYqWQTfP5P2ypteZNKQOMW4VEB2WHJlYoSlSaL1Y,130
torchvision/models/quantization/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/googlenet.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/inception.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/mobilenet.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/mobilenetv2.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/mobilenetv3.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/resnet.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/shufflenetv2.cpython-311.pyc,,
torchvision/models/quantization/__pycache__/utils.cpython-311.pyc,,
torchvision/models/quantization/googlenet.py,sha256=P92cacoKVTV4cDoaNkRevLx1daGH5DkPfUwPDMuOXO0,8290
torchvision/models/quantization/inception.py,sha256=TXz2hRpSvh6zYP398MsXTYQMnqYgnYnq5wyG6xr5Nhk,11088
torchvision/models/quantization/mobilenet.py,sha256=alrEJwktmXVcCphU8h2EAJZX0YdKfcz4tJEOdG2BXB8,217
torchvision/models/quantization/mobilenetv2.py,sha256=g2z3HPQ0MXFCuMV5TpLPRdO99m3wC_3d0ukUanXaJHo,6037
torchvision/models/quantization/mobilenetv3.py,sha256=l9g1AwKiU35XKuobXo-UPe5MqRKC1kgN7xgWWim4qr4,9467
torchvision/models/quantization/resnet.py,sha256=azvn1vwebP22qryYGNgFLMviGjHklXOX7xd4C2cggUo,18423
torchvision/models/quantization/shufflenetv2.py,sha256=7k9MLRLzjP3vke-e0Ai_cnA-j15KGQq5yDcs_ELXUg8,17311
torchvision/models/quantization/utils.py,sha256=Ij88l6toyO8MQi1w512Jt-yQ2Q9hK75-Z2SOjIzS6Zw,2109
torchvision/models/regnet.py,sha256=NbsA3RO7ka7k9fwYyVZ5wvvtJUuhXqyX76aGIoIujGE,65124
torchvision/models/resnet.py,sha256=AXWWl7XlkSQpUCsv8lCaWeDuYaq-KyOLXmBe0R8rv58,39917
torchvision/models/segmentation/__init__.py,sha256=TLL2SSmqE08HLiv_yyIWyIyrvf2xaOsZi0muDv_Y5Vc,69
torchvision/models/segmentation/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/segmentation/__pycache__/_utils.cpython-311.pyc,,
torchvision/models/segmentation/__pycache__/deeplabv3.cpython-311.pyc,,
torchvision/models/segmentation/__pycache__/fcn.cpython-311.pyc,,
torchvision/models/segmentation/__pycache__/lraspp.cpython-311.pyc,,
torchvision/models/segmentation/_utils.py,sha256=yFeyBa5_Pyv1UQ_2N64XMRgTYsxifwzDd-VRP-vmIGM,1234
torchvision/models/segmentation/deeplabv3.py,sha256=MkmYEm1dF4afQEYXmFfxLAcqioiT5uWKYGSXCccIYh4,15405
torchvision/models/segmentation/fcn.py,sha256=mQ1Wi4S9j5G6OQbNciuxNwVbJ6e9miTzIWj6mUF5JwA,9205
torchvision/models/segmentation/lraspp.py,sha256=yx_b3PJsH5e0F3TqiGDhEnbXCGTdNX2iIxsKvNenM0s,7821
torchvision/models/shufflenetv2.py,sha256=VEGsTNNTdqdu8m7I62zQuJK_5CkET-0Y4ixYBJ-QBCs,15852
torchvision/models/squeezenet.py,sha256=Dha-ci350KU15D0LS9N07kw6MlNuusUHSBnC83Ery_E,8986
torchvision/models/swin_transformer.py,sha256=-Q9Kd1qzsD7vL3u137Q4MoHSTwzA6QFcweaF0zCWmUk,40370
torchvision/models/vgg.py,sha256=Qt9r5sFoY-oPdwP4De61jqSVe9XUZLXK47r9yVDQ33M,19736
torchvision/models/video/__init__.py,sha256=xHHR5c6kP0cMDXck7XnXq19iJL_Uemcxg3OC00cqE6A,97
torchvision/models/video/__pycache__/__init__.cpython-311.pyc,,
torchvision/models/video/__pycache__/mvit.cpython-311.pyc,,
torchvision/models/video/__pycache__/resnet.cpython-311.pyc,,
torchvision/models/video/__pycache__/s3d.cpython-311.pyc,,
torchvision/models/video/__pycache__/swin_transformer.cpython-311.pyc,,
torchvision/models/video/mvit.py,sha256=xIK4nCOuJWXQjoX8NzcouwzyTkIkcFug3yiu0a5-Dk8,33895
torchvision/models/video/resnet.py,sha256=JOP7FDfUOfQQP-jEYUvzSIOr9Iaexl2L3iUh-rzrp90,17274
torchvision/models/video/s3d.py,sha256=Rn-iypP13jrETAap1Qd4NY6kkpYDuSXjGkEKZDOxemI,8034
torchvision/models/video/swin_transformer.py,sha256=M74P2v4lVKM6zgwoeFn_njppZB2l-gAjuGVvzzESKpU,28431
torchvision/models/vision_transformer.py,sha256=GE-_-dlFJQPTnONe4qrzYOYp-wavPOrFPCo9krM39Vg,33000
torchvision/ops/__init__.py,sha256=7wibGxcF1JHDviSNs9O9Pwlc8dhMSFfZo0wzVjTFnAY,2001
torchvision/ops/__pycache__/__init__.cpython-311.pyc,,
torchvision/ops/__pycache__/_box_convert.cpython-311.pyc,,
torchvision/ops/__pycache__/_register_onnx_ops.cpython-311.pyc,,
torchvision/ops/__pycache__/_utils.cpython-311.pyc,,
torchvision/ops/__pycache__/boxes.cpython-311.pyc,,
torchvision/ops/__pycache__/ciou_loss.cpython-311.pyc,,
torchvision/ops/__pycache__/deform_conv.cpython-311.pyc,,
torchvision/ops/__pycache__/diou_loss.cpython-311.pyc,,
torchvision/ops/__pycache__/drop_block.cpython-311.pyc,,
torchvision/ops/__pycache__/feature_pyramid_network.cpython-311.pyc,,
torchvision/ops/__pycache__/focal_loss.cpython-311.pyc,,
torchvision/ops/__pycache__/giou_loss.cpython-311.pyc,,
torchvision/ops/__pycache__/misc.cpython-311.pyc,,
torchvision/ops/__pycache__/poolers.cpython-311.pyc,,
torchvision/ops/__pycache__/ps_roi_align.cpython-311.pyc,,
torchvision/ops/__pycache__/ps_roi_pool.cpython-311.pyc,,
torchvision/ops/__pycache__/roi_align.cpython-311.pyc,,
torchvision/ops/__pycache__/roi_pool.cpython-311.pyc,,
torchvision/ops/__pycache__/stochastic_depth.cpython-311.pyc,,
torchvision/ops/_box_convert.py,sha256=glF6sulLzaw_KG36wg0CHWt0ef62BnkjokbqQnBUMsU,2490
torchvision/ops/_register_onnx_ops.py,sha256=g4M5Fp7n_5ZTzIQcUXvEct3YFlUMPNVSQQfBP-J0eQQ,4288
torchvision/ops/_utils.py,sha256=xFrLnLhKDHiG2TN39tUWY-MJbLEPka6dkaVVJFAN7-8,3736
torchvision/ops/boxes.py,sha256=LvrW5Pj4K5TRFApj70zUx9xuKZ0SJixgvAcPZk0bwUA,16796
torchvision/ops/ciou_loss.py,sha256=Qzm89C82ehX-YvYBPLPRPhbJZdr3itizxuxrT7MLi9o,2834
torchvision/ops/deform_conv.py,sha256=DuIosFDK3tsY5RlHU7mez5x1p08IQai9WG14z3S0gzU,7185
torchvision/ops/diou_loss.py,sha256=6IebWlMYc_2YnbG36niDXgM16vxajSKRfiusEuUJwpQ,3456
torchvision/ops/drop_block.py,sha256=ZkIzM1b3v5_U7z0eavzaNpN7IBq0N4ZNwwvWArispwg,6010
torchvision/ops/feature_pyramid_network.py,sha256=Jts5mzUJX3EarcAQU5MDUe0a5Sgn5YjUstaW2JQpgEE,8952
torchvision/ops/focal_loss.py,sha256=lS5FqgLFuDKlpm0sk5V1VgIA6LFAdJUXQaPi35nEDoU,2319
torchvision/ops/giou_loss.py,sha256=xB_RlES28k_A6iH2VqWAPBQkiT_zkEwdtREDGR_nVJM,2772
torchvision/ops/misc.py,sha256=niQnKPuifQzVXAWnnf6TkVsgetqyetjZ6lq3wi2tsZw,13892
torchvision/ops/poolers.py,sha256=sfgcZWh2dIo9UY437CnpAHdxqPQhuvjNPYzhIKlAIPE,12247
torchvision/ops/ps_roi_align.py,sha256=6_kmnE6z_3AZZ1N2hrS_uK3cbuzqZhjdM2rC50mfYUo,3715
torchvision/ops/ps_roi_pool.py,sha256=2JrjJwzVtEeEg0BebkCnGfq4xEDwMCD-Xh915mvNcyI,2940
torchvision/ops/roi_align.py,sha256=xCwgOCqGfn1WYnJ9krI_ch9_i332gw8YTub1QwO5b84,11030
torchvision/ops/roi_pool.py,sha256=cN7rSCQfpUzETvP8SjPDl1yfXjbxg9I-tXnHbvAKya8,3015
torchvision/ops/stochastic_depth.py,sha256=9T4Zu_BaemKZafSmRwrPCVr5aaGH8tmzlsQAZO-1_-Y,2302
torchvision/transforms/__init__.py,sha256=WCNXTJUbJ1h7YaN9UfrBSvt--ST2PAV4sLICbTS-L5A,55
torchvision/transforms/__pycache__/__init__.cpython-311.pyc,,
torchvision/transforms/__pycache__/_functional_pil.cpython-311.pyc,,
torchvision/transforms/__pycache__/_functional_tensor.cpython-311.pyc,,
torchvision/transforms/__pycache__/_functional_video.cpython-311.pyc,,
torchvision/transforms/__pycache__/_presets.cpython-311.pyc,,
torchvision/transforms/__pycache__/_transforms_video.cpython-311.pyc,,
torchvision/transforms/__pycache__/autoaugment.cpython-311.pyc,,
torchvision/transforms/__pycache__/functional.cpython-311.pyc,,
torchvision/transforms/__pycache__/transforms.cpython-311.pyc,,
torchvision/transforms/_functional_pil.py,sha256=UEiaElYLuYXkNR__O_dbKts2BKBsb28Rj50RMFgRxig,12505
torchvision/transforms/_functional_tensor.py,sha256=hpLy9xCwONubxoGfzXiqNs0nEhgVaDKRuMcqAxSi090,34794
torchvision/transforms/_functional_video.py,sha256=c4BbUi3Y2LvskozFdy619piLBd5acsjxgogYAXmY5P8,3971
torchvision/transforms/_presets.py,sha256=UVxchNgdPL-4iVHjOxcHndygktw4K76hhxEK8ks1zlw,8700
torchvision/transforms/_transforms_video.py,sha256=ub2gCT5ELiK918Bq-Pp6mzhWrAZxlj7blfpkA8Dhb1o,5124
torchvision/transforms/autoaugment.py,sha256=UD8UBlT4dWCIQaNDUDQBtc0osMHHPQluLr7seZJr4cY,28858
torchvision/transforms/functional.py,sha256=NioCgkCAceO1Aq4_ngYHE3yUVZ6eDsh66D-veM6gHU0,68953
torchvision/transforms/transforms.py,sha256=NeFpo86xWf2h8vMz_vXpbqV9PzRoROEp6GlNxw9oTZQ,87710
torchvision/transforms/v2/__init__.py,sha256=AfbkHi6yQoEhrbfNymH2z8sZpqznEcwZZEPWQNFDdOM,1492
torchvision/transforms/v2/__pycache__/__init__.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_augment.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_auto_augment.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_color.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_container.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_deprecated.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_geometry.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_meta.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_misc.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_temporal.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_transform.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_type_conversion.cpython-311.pyc,,
torchvision/transforms/v2/__pycache__/_utils.cpython-311.pyc,,
torchvision/transforms/v2/_augment.py,sha256=LWrgFw9wXXNsYXoD2xxjWB-_iAJjMXrtZDBv2M1amHQ,15625
torchvision/transforms/v2/_auto_augment.py,sha256=-pM1nhChaYU61KK4Bng8URo4m0pVYDvpWsDAFiQ7MUg,32407
torchvision/transforms/v2/_color.py,sha256=pFsVgt3o59y8p3_GweVNgelR25cWnvXAtTSyIvrZAsE,17366
torchvision/transforms/v2/_container.py,sha256=E-8TvTF_qBqC6aLnlK5mnp3V27oQOxhy1X7PZfQAD5w,6229
torchvision/transforms/v2/_deprecated.py,sha256=9oSk7wHbYIajAizg37oOGFcWC5GICwEuSHGAOTGfGkE,1997
torchvision/transforms/v2/_geometry.py,sha256=JZnWTJSiZwzTeI9EH84u1642xpVwnXmJk3QZjb5avW0,68331
torchvision/transforms/v2/_meta.py,sha256=I_5TP_yGo_vHpx7xKDYMIrlzuiusCjcXRX6zR8WzEUU,1441
torchvision/transforms/v2/_misc.py,sha256=YDCO7d96gdqdO4t3MX3w3ZG5AMmMS1NrL5rFmWy1n8g,18076
torchvision/transforms/v2/_temporal.py,sha256=FmFtwnluzRzJOX6Q-c2hxePQW0SHAp67ctLsZLWt8FM,932
torchvision/transforms/v2/_transform.py,sha256=i3KWfXcFjioL3FV5ix1XetK1cLHyw3hutIWwd_wNn9w,8652
torchvision/transforms/v2/_type_conversion.py,sha256=zWwYZSLm-7Mk_0TrSoJDHX9Kel-fyy36wGlYYfaEvuw,2944
torchvision/transforms/v2/_utils.py,sha256=CisntDy7j0GuorgmUAV80oS7A1D8zxzFD3AGFrpytCQ,8872
torchvision/transforms/v2/functional/__init__.py,sha256=BV6Lqai0jNzMjv5TDeSKev2AgpwO79QuPK1m36majRo,3624
torchvision/transforms/v2/functional/__pycache__/__init__.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_augment.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_color.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_deprecated.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_geometry.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_meta.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_misc.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_temporal.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_type_conversion.cpython-311.pyc,,
torchvision/transforms/v2/functional/__pycache__/_utils.cpython-311.pyc,,
torchvision/transforms/v2/functional/_augment.py,sha256=rwliCn3Z9CMwpY7xc0sh-5tD-w9YU9aFPok3e7DAc54,3295
torchvision/transforms/v2/functional/_color.py,sha256=R8dminUMbhugepxr0nTzKREvfsitWTqU68fGEW8Ui9c,30990
torchvision/transforms/v2/functional/_deprecated.py,sha256=-X7agTXp-JnbFpsp3xoVk1eZr7OyT-evBt0nlULAR40,825
torchvision/transforms/v2/functional/_geometry.py,sha256=rUNxmCoLNkW4H-g2tHkJVHeYGrgqkeY5m79XeCaENI8,89743
torchvision/transforms/v2/functional/_meta.py,sha256=b_MF4SQrmZNVpcN8X-8edP-CEmQ241MKDWpQTG4pmbI,10826
torchvision/transforms/v2/functional/_misc.py,sha256=BgMN67r7JRqNWDH1KQiioDTCaWMMMb8r854eLRrjHC8,15657
torchvision/transforms/v2/functional/_temporal.py,sha256=tSRkkqOqUQ0QXjENF82F16El1-J0IDoFKIH-ss_cpC4,1163
torchvision/transforms/v2/functional/_type_conversion.py,sha256=oYf4LMgiClvEZwwc3WbKI7fJ-rRFhDrVSBKiPA5vxio,896
torchvision/transforms/v2/functional/_utils.py,sha256=3T5iFgq8whHQbk7duizYmoxspH6mtkP-L7ku627zfBY,5620
torchvision/tv_tensors/__init__.py,sha256=7UBIZbraVyhIO-ZCeKtD59if85XKkAQ4njjS6RjEfDg,1544
torchvision/tv_tensors/__pycache__/__init__.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_bounding_boxes.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_dataset_wrapper.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_image.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_mask.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_torch_function_helpers.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_tv_tensor.cpython-311.pyc,,
torchvision/tv_tensors/__pycache__/_video.cpython-311.pyc,,
torchvision/tv_tensors/_bounding_boxes.py,sha256=iRykoOkjSkE7AUDpIFNAkTwF14kZUWR4D7w7dfe6RzE,4574
torchvision/tv_tensors/_dataset_wrapper.py,sha256=3S5X_xgzfxb4Bd4A05s3TwpWoDT5iNfUKXSz7FNOzY8,24878
torchvision/tv_tensors/_image.py,sha256=r66_5vfWVIFxnTU3BJ74Rsuv9VooF1sVMO2yZgQYDsc,1957
torchvision/tv_tensors/_mask.py,sha256=x9kqe6ik8EvEjKY45UtuX_CRGXsh5-NNVpoStCFLJbc,1490
torchvision/tv_tensors/_torch_function_helpers.py,sha256=U7r-QG2jKV_KYeUQJ2hKPlYRLwqz_xhEs7_L1oXpBvM,2348
torchvision/tv_tensors/_tv_tensor.py,sha256=v-dVm-ZZs4fdT6TVcAeX7KnAJpudVUf55VqdtQtgthE,6380
torchvision/tv_tensors/_video.py,sha256=zYbXjwFnzsxwV8RGt2BCw_AWxSDPASYDVthSi4YMiTo,1420
torchvision/utils.py,sha256=eZLOxrFdrw-zQkBfgyNQhuUkfygfRFBUp2LM348qDpE,27075
torchvision/version.py,sha256=aR1DGq4vEWYD2HnPBEk0N7pRxrAJyUDdX5s5up4963g,206
torchvision/zlib.dll,sha256=jb_W73N0qDEVi93Mt549VmXpYlyBr1V_FbQVC3h39oc,99608