2023

CVPR'23 — DepGraph: Towards Any Structural Pruning
Gongfan Fang, Xinyin Ma, Mingli Song, Michael Bi Mi, and Xinchao Wang
Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2023
arXiv / PDF / Code

@comment{Acronyms and product names in titles are braced so that
         sentence-casing styles (plain, abbrv, ...) do not lowercase them.
         Citation keys are kept exactly as published on the page so existing
         \cite commands continue to resolve.}

@inproceedings{fang2023depgraph,
  title     = {{DepGraph}: Towards Any Structural Pruning},
  author    = {Fang, Gongfan and Ma, Xinyin and Song, Mingli and Mi, Michael Bi and Wang, Xinchao},
  booktitle = {Proceedings of the {IEEE/CVF} Conference on Computer Vision and Pattern Recognition},
  pages     = {16091--16101},
  year      = {2023},
}

NeurIPS'23 — LLM-Pruner: On the Structural Pruning of Large Language Models
Xinyin Ma, Gongfan Fang, and Xinchao Wang
Advances in Neural Information Processing Systems, 2023
arXiv / Code

@inproceedings{ma2023llmpruner,
  title     = {{LLM-Pruner}: On the Structural Pruning of Large Language Models},
  author    = {Ma, Xinyin and Fang, Gongfan and Wang, Xinchao},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2023},
}

Preprint'23 — 0.1% Data Makes Segment Anything Slim
Zigeng Chen, Gongfan Fang, Xinyin Ma, and Xinchao Wang
arXiv preprint arXiv:2312.05284, 2023
arXiv / Code

@article{chen20230,
  title         = {0.1\% Data Makes {Segment Anything} Slim},
  author        = {Chen, Zigeng and Fang, Gongfan and Ma, Xinyin and Wang, Xinchao},
  journal       = {arXiv preprint arXiv:2312.05284},
  eprint        = {2312.05284},
  archiveprefix = {arXiv},
  year          = {2023},
}

NeurIPS'23 — Structural Pruning for Diffusion Models
Gongfan Fang, Xinyin Ma, and Xinchao Wang
Advances in Neural Information Processing Systems, 2023
arXiv / Code

@inproceedings{fang2023structural,
  title     = {Structural Pruning for Diffusion Models},
  author    = {Fang, Gongfan and Ma, Xinyin and Wang, Xinchao},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2023},
}

2022

AAAI'22 — Up to 100x Faster Data-free Knowledge Distillation
Gongfan Fang, Kanya Mo, Xinchao Wang, Jie Song, Shitao Bei, Haofei Zhang, and Mingli Song
Proceedings of the AAAI Conference on Artificial Intelligence, 2022
arXiv / PDF / Code
@comment{Conference papers below that were previously typed @article with the
         proceedings name in the journal field have been retyped
         @inproceedings with a booktitle, matching the correct BibTeX entry
         type and the convention already used elsewhere in this file.
         Citation keys are unchanged so existing \cite commands still work.}

@inproceedings{fang2022up,
  title     = {Up to 100x Faster Data-free Knowledge Distillation},
  author    = {Fang, Gongfan and Mo, Kanya and Wang, Xinchao and Song, Jie and Bei, Shitao and Zhang, Haofei and Song, Mingli},
  booktitle = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  volume    = {36},
  number    = {6},
  pages     = {6597--6604},
  year      = {2022},
}

IJCAI'22 — Prompting to Distill: Boosting Data-Free Knowledge Distillation via Reinforced Prompt
Xinyin Ma, Xinchao Wang, Gongfan Fang, Yongliang Shen, and Weiming Lu
Proceedings of International Joint Conference on Artificial Intelligence, 2022
PDF

@inproceedings{ma2022prompting,
  title     = {Prompting to Distill: Boosting Data-Free Knowledge Distillation via Reinforced Prompt},
  author    = {Ma, Xinyin and Wang, Xinchao and Fang, Gongfan and Shen, Yongliang and Lu, Weiming},
  booktitle = {Proceedings of International Joint Conference on Artificial Intelligence},
  year      = {2022},
}

TIP'23 — Knowledge Amalgamation for Object Detection with Transformers
Haofei Zhang, Feng Mao, Mengqi Xue, Gongfan Fang, Zunlei Feng, Jie Song, and Mingli Song
IEEE Transactions on Image Processing, 2022
arXiv

@article{zhang2022knowledge,
  title   = {Knowledge Amalgamation for Object Detection with Transformers},
  author  = {Zhang, Haofei and Mao, Feng and Xue, Mengqi and Fang, Gongfan and Feng, Zunlei and Song, Jie and Song, Mingli},
  journal = {IEEE Transactions on Image Processing},
  year    = {2022},
}

2021

NeurIPS'21 — Mosaicking to Distill: Knowledge Distillation from Out-of-Domain Data
Gongfan Fang, Yifan Bao, Jie Song, Xinchao Wang, Donglin Xie, Chengchao Shen, and Mingli Song
Advances in Neural Information Processing Systems, 2021
arXiv / PDF / Code

@inproceedings{fang2021mosaicking,
  title     = {Mosaicking to Distill: Knowledge Distillation from Out-of-Domain Data},
  author    = {Fang, Gongfan and Bao, Yifan and Song, Jie and Wang, Xinchao and Xie, Donglin and Shen, Chengchao and Song, Mingli},
  booktitle = {Advances in Neural Information Processing Systems},
  volume    = {34},
  pages     = {11920--11932},
  year      = {2021},
}

IJCAI'21 — Contrastive Model Inversion for Data-free Knowledge Distillation
Gongfan Fang, Jie Song, Xinchao Wang, Chengchao Shen, Xingen Wang, and Mingli Song
Proceedings of International Joint Conference on Artificial Intelligence, 2021
arXiv / PDF / Code

@inproceedings{fang2021contrastive,
  title     = {Contrastive Model Inversion for Data-free Knowledge Distillation},
  author    = {Fang, Gongfan and Song, Jie and Wang, Xinchao and Shen, Chengchao and Wang, Xingen and Song, Mingli},
  booktitle = {Proceedings of International Joint Conference on Artificial Intelligence},
  year      = {2021},
}

2020

EMNLP'20 — Adversarial Self-Supervised Data-Free Distillation for Text Classification
Xinyin Ma, Yongliang Shen, Gongfan Fang, Chen Chen, Chenghao Jia, and Weiming Lu
Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing, 2020
arXiv / PDF

@inproceedings{ma2020adversarial,
  title     = {Adversarial Self-Supervised Data-Free Distillation for Text Classification},
  author    = {Ma, Xinyin and Shen, Yongliang and Fang, Gongfan and Chen, Chen and Jia, Chenghao and Lu, Weiming},
  booktitle = {Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing},
  pages     = {6182--6192},
  year      = {2020},
}

2019

Preprint'19 — Data-free Adversarial Distillation
Gongfan Fang, Jie Song, Chengchao Shen, Xinchao Wang, Da Chen, and Mingli Song
arXiv preprint arXiv:1912.11006, 2019
arXiv / Code

@article{fang2019data,
  title         = {Data-free Adversarial Distillation},
  author        = {Fang, Gongfan and Song, Jie and Shen, Chengchao and Wang, Xinchao and Chen, Da and Song, Mingli},
  journal       = {arXiv preprint arXiv:1912.11006},
  eprint        = {1912.11006},
  archiveprefix = {arXiv},
  year          = {2019},
}