From 422a7277e37136d8eb24d46d5f574fc74eb298c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=88=98=E7=90=A6?= Date: Thu, 12 Jan 2023 20:15:33 +0800 Subject: [PATCH] add lad to distill configs --- configs/distill/mmdet/lad/README.md | 45 +++++++++++++++++++ .../mmdet/lad/lad_ppa_r101_ppa_r50_1x_coco.py | 30 +++++++++++++ 2 files changed, 75 insertions(+) create mode 100644 configs/distill/mmdet/lad/README.md create mode 100644 configs/distill/mmdet/lad/lad_ppa_r101_ppa_r50_1x_coco.py diff --git a/configs/distill/mmdet/lad/README.md b/configs/distill/mmdet/lad/README.md new file mode 100644 index 000000000..78252a108 --- /dev/null +++ b/configs/distill/mmdet/lad/README.md @@ -0,0 +1,45 @@ +# LAD + +> [Improving Object Detection by Label Assignment Distillation](https://arxiv.org/abs/2108.10520) + + + +## Abstract + +Label assignment in object detection aims to assign targets, foreground or background, to sampled regions in an image. Unlike labeling for image classification, this problem is not well defined due to the object's bounding box. In this paper, we investigate the problem from a perspective of distillation, hence we call Label Assignment Distillation (LAD). Our initial motivation is very simple, we use a teacher network to generate labels for the student. This can be achieved in two ways: either using the teacher's prediction as the direct targets (soft label), or through the hard labels dynamically assigned by the teacher (LAD). Our experiments reveal that: (i) LAD is more effective than soft-label, but they are complementary. (ii) Using LAD, a smaller teacher can also improve a larger student significantly, while soft-label can't. We then introduce Co-learning LAD, in which two networks simultaneously learn from scratch and the role of teacher and student are dynamically interchanged. Using PAA-ResNet50 as a teacher, our LAD techniques can improve detectors PAA-ResNet101 and PAA-ResNeXt101 to 46AP and 47.5AP on the COCO test-dev set. 
With a stronger teacher PAA-SwinB, we improve the students PAA-ResNet50 to 43.7AP by only 1x schedule training and standard setting, and PAA-ResNet101 to 47.9AP, significantly surpassing the current methods. + +
+ +
+ +## Results and Models + +We provide config files to reproduce the object detection results in the +WACV 2022 paper for Improving Object Detection by Label Assignment +Distillation. + +### PAA with LAD + +| Teacher | Student | Training schedule | AP (val) | Config | Download | +| :-----: | :-----: | :---------------: | :------: | :----------: | :----------------------: | +| -- | R-50 | 1x | 40.4 | [config](<>) | [model](<>) \| [log](<>) | +| -- | R-101 | 1x | 42.6 | [config](<>) | [model](<>) \| [log](<>) | +| R-101 | R-50 | 1x | 41.4 | [config](<>) | [model](<>) \| [log](<>) | +| R-50 | R-101 | 1x | 43.2 | [config](<>) | [model](<>) \| [log](<>) | + +## Note + +- Meaning of Config name: lad_r50(student model)\_paa(based on paa)\_r101(teacher model)\_fpn(neck)\_coco(dataset)\_1x(12 epoch).py +- Results may fluctuate by about 0.2 mAP. +- 2 GPUs are used, 8 samples per GPU. + +## Citation + +```latex +@inproceedings{nguyen2021improving, + title={Improving Object Detection by Label Assignment Distillation}, + author={Chuong H. Nguyen and Thuy C. Nguyen and Tuan N. Tang and Nam L. H. 
Phan},
+  booktitle = {WACV},
+  year={2022}
+}
+```
diff --git a/configs/distill/mmdet/lad/lad_ppa_r101_ppa_r50_1x_coco.py b/configs/distill/mmdet/lad/lad_ppa_r101_ppa_r50_1x_coco.py
new file mode 100644
index 000000000..749828b1a
--- /dev/null
+++ b/configs/distill/mmdet/lad/lad_ppa_r101_ppa_r50_1x_coco.py
@@ -0,0 +1,30 @@
+_base_ = ['mmdet::paa/paa_r50_fpn_1x_coco.py']  # student: PAA R-50 (teacher below is PAA R-101)
+
+teacher_ckpt = 'http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth' # noqa: E501
+
+student = _base_.model  # the PAA R-50 detector inherited from the base config
+
+model = dict(
+    _delete_=True,
+    _scope_='mmrazor',
+    type='SingleTeacherDistill',
+    architecture=student,
+    teacher=dict(
+        cfg_path='mmdet::paa/paa_r101_fpn_1x_coco.py', pretrained=False),
+    teacher_ckpt=teacher_ckpt,
+    distiller=dict(
+        type='ConfigurableDistiller',
+        distill_deliveries=dict(
+            assign=dict(
+                type='MethodOutputs',
+                max_keep_data=10000,
+                method_path='mmdet.models.PAAHead.get_targets'),
+            reassign=dict(
+                type='MethodOutputs',
+                max_keep_data=10000,
+                method_path='mmdet.models.PAAHead.paa_reassign'),
+        )))
+
+find_unused_parameters = True
+
+val_cfg = dict(_delete_=True, type='mmrazor.SingleTeacherDistillValLoop')