From 307192015de85aa8c6b0a53b0348f36db5c858f5 Mon Sep 17 00:00:00 2001
From: zhangzhongyu
Date: Mon, 28 Nov 2022 11:53:28 +0800
Subject: [PATCH 1/3] 1. Revise the configs of wrn22, wrn28, and wrn40.
 2. Revise the data_preprocessor of ofd_backbone_resnet50_resnet18_8xb16_cifar10

---
 .../ofd/ofd_backbone_resnet50_resnet18_8xb16_cifar10.py     | 6 +++---
 configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py | 2 +-
 configs/vanilla/mmcls/wide-resnet/wrn28-w4_b16x8_cifar10.py | 2 +-
 configs/vanilla/mmcls/wide-resnet/wrn40-w2_b16x8_cifar10.py | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/configs/distill/mmcls/ofd/ofd_backbone_resnet50_resnet18_8xb16_cifar10.py b/configs/distill/mmcls/ofd/ofd_backbone_resnet50_resnet18_8xb16_cifar10.py
index 447c14b8f..7b4e607a1 100644
--- a/configs/distill/mmcls/ofd/ofd_backbone_resnet50_resnet18_8xb16_cifar10.py
+++ b/configs/distill/mmcls/ofd/ofd_backbone_resnet50_resnet18_8xb16_cifar10.py
@@ -10,10 +10,10 @@
     data_preprocessor=dict(
         type='ImgDataPreprocessor',
         # RGB format normalization parameters
-        mean=[123.675, 116.28, 103.53],
-        std=[58.395, 57.12, 57.375],
+        mean=[125.307, 122.961, 113.8575],
+        std=[51.5865, 50.847, 51.255],
         # convert image from BGR to RGB
-        bgr_to_rgb=True),
+        bgr_to_rgb=False),
     architecture=dict(
         cfg_path=  # noqa: E251
         'mmrazor::vanilla/mmcls/wide-resnet/wrn16-w2_b16x8_cifar10.py',
diff --git a/configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py b/configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py
index 155b7550b..091fa3003 100644
--- a/configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py
+++ b/configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py
@@ -1,3 +1,3 @@
-_base_ = ['wrn16_2_b16x8_cifar10.py']
+_base_ = ['wrn16-w2_b16x8_cifar10.py']
 model = dict(
     backbone=dict(depth=22, widen_factor=4), head=dict(in_channels=256, ))
diff --git a/configs/vanilla/mmcls/wide-resnet/wrn28-w4_b16x8_cifar10.py b/configs/vanilla/mmcls/wide-resnet/wrn28-w4_b16x8_cifar10.py
index b05daac19..72dede29f 100644
--- a/configs/vanilla/mmcls/wide-resnet/wrn28-w4_b16x8_cifar10.py
+++ b/configs/vanilla/mmcls/wide-resnet/wrn28-w4_b16x8_cifar10.py
@@ -1,3 +1,3 @@
-_base_ = ['wrn16_2_b16x8_cifar10.py']
+_base_ = ['wrn16-w2_b16x8_cifar10.py']
 model = dict(
     backbone=dict(depth=28, widen_factor=4), head=dict(in_channels=256, ))
diff --git a/configs/vanilla/mmcls/wide-resnet/wrn40-w2_b16x8_cifar10.py b/configs/vanilla/mmcls/wide-resnet/wrn40-w2_b16x8_cifar10.py
index f4c71ce12..ba25d6b81 100644
--- a/configs/vanilla/mmcls/wide-resnet/wrn40-w2_b16x8_cifar10.py
+++ b/configs/vanilla/mmcls/wide-resnet/wrn40-w2_b16x8_cifar10.py
@@ -1,3 +1,3 @@
-_base_ = ['wrn16_2_b16x8_cifar10.py']
+_base_ = ['wrn16-w2_b16x8_cifar10.py']
 model = dict(
     backbone=dict(depth=40, widen_factor=2), head=dict(in_channels=128, ))

From 40fd7347c3e527a4bf68893066ddaaddd888447f Mon Sep 17 00:00:00 2001
From: zhangzhongyu
Date: Tue, 29 Nov 2022 20:07:21 +0800
Subject: [PATCH 2/3] 1. Add README for vanilla-wrn.

---
 configs/vanilla/mmcls/wide-resnet/README.md | 35 +++++++++++++++++++++
 1 file changed, 35 insertions(+)
 create mode 100644 configs/vanilla/mmcls/wide-resnet/README.md

diff --git a/configs/vanilla/mmcls/wide-resnet/README.md b/configs/vanilla/mmcls/wide-resnet/README.md
new file mode 100644
index 000000000..6b703812b
--- /dev/null
+++ b/configs/vanilla/mmcls/wide-resnet/README.md
@@ -0,0 +1,35 @@
+# Wide-ResNet
+
+> [Wide Residual Networks](https://arxiv.org/abs/1605.07146)
+
+
+
+## Abstract
+
+Deep residual networks were shown to be able to scale up to thousands of layers and still have improving performance. However, each fraction of a percent of improved accuracy costs nearly doubling the number of layers, and so training very deep residual networks has a problem of diminishing feature reuse, which makes these networks very slow to train. To tackle these problems, in this paper we conduct a detailed experimental study on the architecture of ResNet blocks, based on which we propose a novel architecture where we decrease depth and increase width of residual networks. We call the resulting network structures wide residual networks (WRNs) and show that these are far superior over their commonly used thin and very deep counterparts. For example, we demonstrate that even a simple 16-layer-deep wide residual network outperforms in accuracy and efficiency all previous deep residual networks, including thousand-layer-deep networks, achieving new state-of-the-art results on CIFAR, SVHN, COCO, and significant improvements on ImageNet.
+
+ +
+
+## Results and models
+
+### Cifar10
+
+| Model | Top-1 (%) | Config | Download |
+| :-------------: | :-------: | :------------------------------------------: | :--------------------------------------------------------------------------------------------------------: |
+| WRN-16 | 93.04 | [config](./wrn16-w2_b16x8_cifar10.py) | [model](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn16_2_b16x8_cifar10_20220831_204709-446b466e.pth) \| [log](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn16_2_b16x8_cifar10_20220831_204709-446b466e.json) |
+| WRN-22 | | [config](./wrn22-w4_b16x8_cifar10.py) | [model]() \| [log]() |
+| WRN-28 | 95.41 | [config](./wrn28-w4_b16x8_cifar10.py) | [model](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn28_4_b16x8_cifar10_20220831_173536-d6f8725c.pth) \| [log](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn28_4_b16x8_cifar10_20220831_173536-d6f8725c.json) |
+| WRN-40 | | [config](./wrn40-w2_b16x8_cifar10.py) | [model]() \| [log]() |
+
+
+## Citation
+
+```bibtex
+@INPROCEEDINGS{Zagoruyko2016WRN,
+  author = {Sergey Zagoruyko and Nikos Komodakis},
+  title = {Wide Residual Networks},
+  booktitle = {BMVC},
+  year = {2016}}
+```

From e1006f10cfcef3157b5dd093e6d9241e69722e55 Mon Sep 17 00:00:00 2001
From: zhangzhongyu
Date: Wed, 30 Nov 2022 10:50:14 +0800
Subject: [PATCH 3/3] 1. Revise README of wrn

---
 configs/vanilla/mmcls/wide-resnet/README.md | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/configs/vanilla/mmcls/wide-resnet/README.md b/configs/vanilla/mmcls/wide-resnet/README.md
index 6b703812b..5cc8c4258 100644
--- a/configs/vanilla/mmcls/wide-resnet/README.md
+++ b/configs/vanilla/mmcls/wide-resnet/README.md
@@ -16,13 +16,12 @@ Deep residual networks were shown to be able to scale up to thousands of layers
 
 ### Cifar10
 
-| Model | Top-1 (%) | Config | Download |
-| :-------------: | :-------: | :------------------------------------------: | :--------------------------------------------------------------------------------------------------------: |
+| Model | Top-1 (%) | Config | Download |
+| :----: | :-------: | :-----------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
 | WRN-16 | 93.04 | [config](./wrn16-w2_b16x8_cifar10.py) | [model](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn16_2_b16x8_cifar10_20220831_204709-446b466e.pth) \| [log](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn16_2_b16x8_cifar10_20220831_204709-446b466e.json) |
-| WRN-22 | | [config](./wrn22-w4_b16x8_cifar10.py) | [model]() \| [log]() |
+| WRN-22 | | [config](./wrn22-w4_b16x8_cifar10.py) | [model](<>) \| [log](<>) |
 | WRN-28 | 95.41 | [config](./wrn28-w4_b16x8_cifar10.py) | [model](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn28_4_b16x8_cifar10_20220831_173536-d6f8725c.pth) \| [log](https://openmmlab-share.oss-cn-hangzhou.aliyuncs.com/mmrazor/v1/wide_resnet/wrn28_4_b16x8_cifar10_20220831_173536-d6f8725c.json) |
-| WRN-40 | | [config](./wrn40-w2_b16x8_cifar10.py) | [model]() \| [log]() |
-
+| WRN-40 | | [config](./wrn40-w2_b16x8_cifar10.py) | [model](<>) \| [log](<>) |
 
 ## Citation
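Not part of the patch series: a minimal sketch of how the revised wide-resnet configs above could be sanity-checked. It assumes mmengine and mmcls are installed and that the snippet is run from the root of an mmrazor checkout containing these configs; the values in the comments simply restate what the child configs override.

```python
# Hypothetical check, not part of the patches: load one of the revised WRN
# configs and confirm that the corrected `_base_` filename resolves and that
# the child config overrides only depth, widen_factor and head.in_channels.
from mmengine.config import Config

cfg = Config.fromfile(
    'configs/vanilla/mmcls/wide-resnet/wrn22-w4_b16x8_cifar10.py')

print(cfg.model.backbone.depth)         # expected: 22
print(cfg.model.backbone.widen_factor)  # expected: 4
print(cfg.model.head.in_channels)       # expected: 256
```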