@inproceedings{0c4381fc959b4268bf060c22520d08bd,
title = "{SenseBERT}: Driving Some Sense into {BERT}",
abstract = "The ability to learn from large unlabeled corpora has allowed neural language models to advance the frontier in natural language understanding. However, existing self-supervision techniques operate at the word form level, which serves as a surrogate for the underlying semantic content. This paper proposes a method to employ weak-supervision directly at the word sense level. Our model, named SenseBERT, is pre-trained to predict not only the masked words but also their WordNet supersenses. Accordingly, we attain a lexical-semantic level language model, without the use of human annotation. SenseBERT achieves significantly improved lexical understanding, as we demonstrate by experimenting on SemEval Word Sense Disambiguation, and by attaining a state of the art result on the 'Word in Context' task.",
author = "Yoav Levine and Barak Lenz and Or Dagan and Ori Ram and Dan Padnos and Or Sharir and Shai Shalev-Shwartz and Amnon Shashua and Yoav Shoham",
note = "Publisher Copyright: {\textcopyright} 2020 Association for Computational Linguistics; 58th Annual Meeting of the Association for Computational Linguistics, ACL 2020; Conference date: 05-07-2020 through 10-07-2020",
year = "2020",
language = "English",
series = "Proceedings of the Annual Meeting of the Association for Computational Linguistics",
publisher = "Association for Computational Linguistics (ACL)",
pages = "4656--4667",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics (ACL 2020)",
address = "United States",
}