@inproceedings{b5b1f85e95ea4a11aba9bce910a0b3c3,
title = "BERTese: Learning to speak to BERT",
abstract = "Large pre-trained language models have been shown to encode large amounts of world and commonsense knowledge in their parameters, leading to substantial interest in methods for extracting that knowledge. In past work, knowledge was extracted by taking manually-authored queries and gathering paraphrases for them using a separate pipeline. In this work, we propose a method for automatically rewriting queries into “BERTese”, a paraphrase query that is directly optimized towards better knowledge extraction. To encourage meaningful rewrites, we add auxiliary loss functions that encourage the query to correspond to actual language tokens. We empirically show our approach outperforms competing baselines, obviating the need for complex pipelines. Moreover, BERTese provides some insight into the type of language that helps language models perform knowledge extraction.",
author = "Adi Haviv and Jonathan Berant and Amir Globerson",
note = "Publisher Copyright: {\textcopyright} 2021 Association for Computational Linguistics; 16th Conference of the European Chapter of the Associationfor Computational Linguistics, EACL 2021 ; Conference date: 19-04-2021 Through 23-04-2021",
year = "2021",
language = "English",
series = "EACL 2021 - 16th Conference of the European Chapter of the Association for Computational Linguistics, Proceedings of the Conference",
publisher = "Association for Computational Linguistics (ACL)",
pages = "3618--3623",
booktitle = "EACL 2021 - 16th Conference of the European Chapter of the Association for Computational Linguistics, Proceedings of the Conference",
address = "United States",
}