2024

NeurIPS’24  MaskLLM: Learnable Semi-structured Sparsity for Large Language Models
Gongfan Fang, Hongxu Yin, Saurav Muralidharan, Greg Heinrich, Jeff Pool, Jan Kautz, Pavlo Molchanov, and Xinchao Wang
Advances in Neural Information Processing Systems, 2024
arXiv Webpage Bib Code

@article{fang2024maskllm,
  title   = {MaskLLM: Learnable Semi-structured Sparsity for Large Language Models},
  author  = {Fang, Gongfan and Yin, Hongxu and Muralidharan, Saurav and Heinrich, Greg and Pool, Jeff and Kautz, Jan and Molchanov, Pavlo and Wang, Xinchao},
  journal = {Advances in Neural Information Processing Systems},
  year    = {2024}
}

NeurIPS’24  Remix-DiT: Mixing Diffusion Transformers for Multi-Expert Denoising
Gongfan Fang, Xinyin Ma, and Xinchao Wang
Advances in Neural Information Processing Systems, 2024
arXiv Bib Code

@article{fang2024remixdit,
  title   = {Remix-DiT: Mixing Diffusion Transformers for Multi-Expert Denoising},
  author  = {Fang, Gongfan and Ma, Xinyin and Wang, Xinchao},
  journal = {Advances in Neural Information Processing Systems},
  year    = {2024}
}

ECCV’24  Isomorphic Pruning for Vision Models
Gongfan Fang, Xinyin Ma, Michael Bi Mi, and Xinchao Wang
European Conference on Computer Vision, 2024
arXiv Bib Code

@article{fang2024isomorphic,
  title   = {Isomorphic Pruning for Vision Models},
  author  = {Fang, Gongfan and Ma, Xinyin and Mi, Michael Bi and Wang, Xinchao},
  journal = {European Conference on Computer Vision},
  year    = {2024}
}

NeurIPS’24  AsyncDiff: Parallelizing Diffusion Models by Asynchronous Denoising
Zigeng Chen, Xinyin Ma, Gongfan Fang, Zhenxiong Tan, and Xinchao Wang
Advances in Neural Information Processing Systems, 2024
arXiv Bib Code

@article{chen2024asyncdiff,
  title   = {AsyncDiff: Parallelizing Diffusion Models by Asynchronous Denoising},
  author  = {Chen, Zigeng and Ma, Xinyin and Fang, Gongfan and Tan, Zhenxiong and Wang, Xinchao},
  journal = {Advances in Neural Information Processing Systems},
  year    = {2024}
}

NeurIPS’24  Learning-to-Cache: Accelerating Diffusion Transformer via Layer Caching
Xinyin Ma, Gongfan Fang, Michael Bi Mi, and Xinchao Wang
Advances in Neural Information Processing Systems, 2024
arXiv Bib Code

@article{ma2024learning,
  title   = {Learning-to-Cache: Accelerating Diffusion Transformer via Layer Caching},
  author  = {Ma, Xinyin and Fang, Gongfan and Mi, Michael Bi and Wang, Xinchao},
  journal = {Advances in Neural Information Processing Systems},
  year    = {2024}
}

NeurIPS’24  SlimSam: 0.1% Data Makes Segment Anything Slim
Zigeng Chen, Gongfan Fang, Xinyin Ma, and Xinchao Wang
Advances in Neural Information Processing Systems, 2024
arXiv Bib Code

@article{chen20230,
  title   = {SlimSam: 0.1\% Data Makes Segment Anything Slim},
  author  = {Chen, Zigeng and Fang, Gongfan and Ma, Xinyin and Wang, Xinchao},
  journal = {Advances in Neural Information Processing Systems},
  year    = {2024}
}

CVPR’24  DeepCache: Accelerating Diffusion Models for Free
Xinyin Ma, Gongfan Fang, and Xinchao Wang
Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2024
arXiv Bib Code

@inproceedings{ma2023deepcache,
  title     = {DeepCache: Accelerating Diffusion Models for Free},
  author    = {Ma, Xinyin and Fang, Gongfan and Wang, Xinchao},
  booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
  year      = {2024}
}

InterSpeech’24  LiteFocus: Accelerated Diffusion Inference for Long Audio Synthesis
Zhenxiong Tan, Xinyin Ma, Gongfan Fang, and Xinchao Wang
Conference of the International Speech Communication Association, 2024
arXiv Bib Code

@article{tan2024litefocus,
  title   = {LiteFocus: Accelerated Diffusion Inference for Long Audio Synthesis},
  author  = {Tan, Zhenxiong and Ma, Xinyin and Fang, Gongfan and Wang, Xinchao},
  journal = {Conference of the International Speech Communication Association},
  year    = {2024}
}

2023

CVPR’23  DepGraph: Towards Any Structural Pruning
Gongfan Fang, Xinyin Ma, Mingli Song, Michael Bi Mi, and Xinchao Wang
Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2023
arXiv Bib PDF Code

@inproceedings{fang2023depgraph,
  title     = {DepGraph: Towards Any Structural Pruning},
  author    = {Fang, Gongfan and Ma, Xinyin and Song, Mingli and Mi, Michael Bi and Wang, Xinchao},
  booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
  pages     = {16091--16101},
  year      = {2023}
}

NeurIPS’23  LLM-Pruner: On the Structural Pruning of Large Language Models
Xinyin Ma, Gongfan Fang, and Xinchao Wang
Advances in Neural Information Processing Systems, 2023
arXiv Bib Code

@inproceedings{ma2023llmpruner,
  title     = {LLM-Pruner: On the Structural Pruning of Large Language Models},
  author    = {Ma, Xinyin and Fang, Gongfan and Wang, Xinchao},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2023}
}

NeurIPS’23  Structural Pruning for Diffusion Models
Gongfan Fang, Xinyin Ma, and Xinchao Wang
Advances in Neural Information Processing Systems, 2023
arXiv Bib Code

@inproceedings{fang2023structural,
  title     = {Structural Pruning for Diffusion Models},
  author    = {Fang, Gongfan and Ma, Xinyin and Wang, Xinchao},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2023}
}

2022

AAAI’22  Up to 100x Faster Data-free Knowledge Distillation
Gongfan Fang, Kanya Mo, Xinchao Wang, Jie Song, Shitao Bei, Haofei Zhang, and Mingli Song
Proceedings of the AAAI Conference on Artificial Intelligence, 2022
arXiv Bib PDF Code

@inproceedings{fang2022up,
  title     = {Up to 100x Faster Data-free Knowledge Distillation},
  author    = {Fang, Gongfan and Mo, Kanya and Wang, Xinchao and Song, Jie and Bei, Shitao and Zhang, Haofei and Song, Mingli},
  booktitle = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume    = {36},
  number    = {6},
  pages     = {6597--6604},
  year      = {2022}
}

IJCAI’22  Prompting to Distill: Boosting Data-Free Knowledge Distillation via Reinforced Prompt
Xinyin Ma, Xinchao Wang, Gongfan Fang, Yongliang Shen, and Weiming Lu
Proceedings of International Joint Conference on Artificial Intelligence, 2022
Bib PDF

@article{ma2022prompting,
  title   = {Prompting to Distill: Boosting Data-Free Knowledge Distillation via Reinforced Prompt},
  author  = {Ma, Xinyin and Wang, Xinchao and Fang, Gongfan and Shen, Yongliang and Lu, Weiming},
  journal = {Proceedings of International Joint Conference on Artificial Intelligence},
  year    = {2022}
}

TIP’23  Knowledge Amalgamation for Object Detection with Transformers
Haofei Zhang, Feng Mao, Mengqi Xue, Gongfan Fang, Zunlei Feng, Jie Song, and Mingli Song
IEEE Transactions on Image Processing, 2022
arXiv Bib

@article{zhang2022knowledge,
  title   = {Knowledge Amalgamation for Object Detection with Transformers},
  author  = {Zhang, Haofei and Mao, Feng and Xue, Mengqi and Fang, Gongfan and Feng, Zunlei and Song, Jie and Song, Mingli},
  journal = {IEEE Transactions on Image Processing},
  year    = {2022}
}

2021

NeurIPS’21  Mosaicking to Distill: Knowledge Distillation from Out-of-Domain Data
Gongfan Fang, Yifan Bao, Jie Song, Xinchao Wang, Donglin Xie, Chengchao Shen, and Mingli Song
Advances in Neural Information Processing Systems, 2021
arXiv Bib PDF Code

@article{fang2021mosaicking,
  title   = {Mosaicking to Distill: Knowledge Distillation from Out-of-Domain Data},
  author  = {Fang, Gongfan and Bao, Yifan and Song, Jie and Wang, Xinchao and Xie, Donglin and Shen, Chengchao and Song, Mingli},
  journal = {Advances in Neural Information Processing Systems},
  volume  = {34},
  pages   = {11920--11932},
  year    = {2021}
}

IJCAI’21  Contrastive Model Inversion for Data-free Knowledge Distillation
Gongfan Fang, Jie Song, Xinchao Wang, Chengchao Shen, Xingen Wang, and Mingli Song
Proceedings of International Joint Conference on Artificial Intelligence, 2021
arXiv Bib PDF Code

@article{fang2021contrastive,
  title   = {Contrastive Model Inversion for Data-free Knowledge Distillation},
  author  = {Fang, Gongfan and Song, Jie and Wang, Xinchao and Shen, Chengchao and Wang, Xingen and Song, Mingli},
  journal = {Proceedings of International Joint Conference on Artificial Intelligence},
  year    = {2021}
}

2020

EMNLP’20  Adversarial Self-Supervised Data-Free Distillation for Text Classification
Xinyin Ma, Yongliang Shen, Gongfan Fang, Chen Chen, Chenghao Jia, and Weiming Lu
Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing, 2020
arXiv Bib PDF

@inproceedings{ma2020adversarial,
  title     = {Adversarial Self-Supervised Data-Free Distillation for Text Classification},
  author    = {Ma, Xinyin and Shen, Yongliang and Fang, Gongfan and Chen, Chen and Jia, Chenghao and Lu, Weiming},
  booktitle = {Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing},
  pages     = {6182--6192},
  year      = {2020}
}

2019

Preprint’19  Data-free Adversarial Distillation
Gongfan Fang, Jie Song, Chengchao Shen, Xinchao Wang, Da Chen, and Mingli Song
arXiv preprint arXiv:1912.11006, 2019
arXiv Bib Code

@article{fang2019data,
  title   = {Data-free Adversarial Distillation},
  author  = {Fang, Gongfan and Song, Jie and Shen, Chengchao and Wang, Xinchao and Chen, Da and Song, Mingli},
  journal = {arXiv preprint arXiv:1912.11006},
  year    = {2019}
}