@inproceedings{85e6c7fc56ac4f4692c9fc031c1c5d43,
title = "Pre-training mention representations in coreference models",
abstract = "Collecting labeled data for coreference resolution is a challenging task, requiring skilled annotators. It is thus desirable to develop coreference resolution models that can make use of unlabeled data. Here we provide such an approach for the powerful class of neural coreference models. These models rely on representations of mentions, and we show these representations can be learned in a self-supervised manner towards improving resolution accuracy. We propose two self-supervised tasks that are closely related to coreference resolution and thus improve mention representation. Applying this approach to the GAP dataset results in new state of the arts results.",
author = "Yuval Varkel and Amir Globerson",
note = "Publisher Copyright: {\textcopyright} 2020 Association for Computational Linguistics; 2020 Conference on Empirical Methods in Natural Language Processing, EMNLP 2020; Conference date: 16-11-2020 Through 20-11-2020",
year = "2020",
language = "English",
series = "EMNLP 2020 - 2020 Conference on Empirical Methods in Natural Language Processing, Proceedings of the Conference",
publisher = "Association for Computational Linguistics (ACL)",
pages = "8534--8540",
booktitle = "EMNLP 2020 - 2020 Conference on Empirical Methods in Natural Language Processing, Proceedings of the Conference",
address = "United States",
}