@inproceedings{d8bb748e57e94cab825cb5731b38ee65,
title = "Facts2Story: Controlling Text Generation by Key Facts",
abstract = "Recent advancements in self-attention neural network architectures have raised the bar for open-ended text generation. Yet, while current methods are capable of producing a coherent text which is several hundred words long, attaining control over the content that is being generated—as well as evaluating it—are still open questions. We propose a controlled generation task which is based on expanding a sequence of facts, expressed in natural language, into a longer narrative. We introduce human-based evaluation metrics for this task, as well as a method for deriving a large training dataset. We evaluate three methods on this task, based on fine-tuning pre-trained models. We show that while auto-regressive, unidirectional Language Models such as GPT2 produce better fluency, they struggle to adhere to the requested facts. We propose a plan-and-cloze model (using fine-tuned XLNet) which produces competitive fluency while adhering to the requested content.",
author = "Eyal Orbach and Yoav Goldberg",
note = "28th International Conference on Computational Linguistics, COLING 2020; Conference date: 08-12-2020 through 13-12-2020",
year = "2020",
month = dec,
doi = "10.18653/v1/2020.coling-main.211",
language = "English",
publisher = "Association for Computational Linguistics (ACL)",
pages = "2329--2345",
editor = "Donia Scott and Nuria Bel and Chengqing Zong",
booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
address = "United States",
}