@inproceedings{0d5a2795a03c4b9f81b3d173fe7735fe,
title = "Prompting ELECTRA: Few-Shot Learning with Discriminative Pre-Trained Models",
abstract = "Pre-trained masked language models successfully perform few-shot learning by formulating downstream tasks as text infilling. However, as a strong alternative in full-shot settings, discriminative pre-trained models like ELECTRA do not fit into the paradigm. In this work, we adapt prompt-based few-shot learning to ELECTRA and show that it outperforms masked language models in a wide range of tasks. ELECTRA is pre-trained to distinguish if a token is generated or original. We naturally extend that to prompt-based few-shot learning by training to score the originality of the target options without introducing new parameters. Our method can be easily adapted to tasks involving multi-token predictions without extra computation overhead. Analysis shows that ELECTRA learns distributions that align better with downstream tasks.",
author = "Mengzhou Xia and Mikel Artetxe and Jingfei Du and Danqi Chen and Ves Stoyanov",
note = "Publisher Copyright: {\textcopyright} 2022 Association for Computational Linguistics.; 2022 Conference on Empirical Methods in Natural Language Processing, EMNLP 2022 ; Conference date: 07-12-2022 Through 11-12-2022",
year = "2022",
doi = "10.18653/v1/2022.emnlp-main.780",
language = "English (US)",
series = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing, EMNLP 2022",
publisher = "Association for Computational Linguistics (ACL)",
pages = "11351--11361",
editor = "Yoav Goldberg and Zornitsa Kozareva and Yue Zhang",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing, EMNLP 2022",
}
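
For readers skimming this entry, below is a minimal, illustrative sketch of the scoring rule the abstract describes: each verbalized option is filled into a prompt and scored by how "original" ELECTRA's discriminator judges its tokens to be, summed over the option's tokens so multi-token options need no extra machinery or parameters. It assumes the Hugging Face transformers ElectraForPreTraining head with the google/electra-base-discriminator checkpoint; the helper originality_score and the example prompt are hypothetical, and the sketch shows only inference-time scoring, not the few-shot training described in the paper.

import torch
from transformers import ElectraForPreTraining, ElectraTokenizerFast

tokenizer = ElectraTokenizerFast.from_pretrained("google/electra-base-discriminator")
model = ElectraForPreTraining.from_pretrained("google/electra-base-discriminator")
model.eval()

def originality_score(prompt_with_option: str, option: str) -> float:
    """Sum of log P(original) over the option's tokens, as judged by
    ELECTRA's replaced-token-detection head (higher = more 'original')."""
    enc = tokenizer(prompt_with_option, return_tensors="pt")
    option_ids = tokenizer(option, add_special_tokens=False)["input_ids"]
    input_ids = enc["input_ids"][0].tolist()
    # Locate the option's tokens inside the full prompt (illustrative search).
    for start in range(len(input_ids) - len(option_ids) + 1):
        if input_ids[start:start + len(option_ids)] == option_ids:
            positions = list(range(start, start + len(option_ids)))
            break
    else:
        raise ValueError("option tokens not found in prompt")
    with torch.no_grad():
        # Discriminator logits, one per token; sigmoid(logit) = P(replaced).
        logits = model(**enc).logits[0]
    # P(original) = sigmoid(-logit); accumulate log-probs over the option tokens.
    return torch.nn.functional.logsigmoid(-logits[positions]).sum().item()

# Hypothetical usage: pick the candidate verbalizer the discriminator finds most original.
prompt = "The movie was absolutely wonderful. It was {}."
for word in ["great", "terrible"]:
    print(word, originality_score(prompt.format(word), word))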