@inproceedings{771f6a6aacdd4c0a9909946ae1f6b160,
title = "MsBERT: A New Model for the Reconstruction of Lacunae in Hebrew Manuscripts",
abstract = "Hebrew manuscripts provide thousands of textual transmissions of post-Biblical Hebrew texts. In many cases, the text in the manuscripts is not fully decipherable, whether due to deterioration, perforation, burns, or otherwise. Existing BERT models for Hebrew struggle to fill these gaps, due to the many orthographical deviations found in Hebrew manuscripts. We have pretrained a new dedicated BERT model, dubbed MsBERT (short for: Manuscript BERT), designed from the ground up to handle Hebrew manuscript text. MsBERT substantially outperforms all existing Hebrew BERT models regarding the prediction of missing words in fragmentary Hebrew manuscript transcriptions in multiple genres, as well as regarding the task of differentiating between quoted passages and exegetical elaborations. We provide MsBERT for free download and unrestricted use, and we also provide an interactive and user-friendly website to allow manuscript scholars to leverage the power of MsBERT in their scholarly work of reconstructing fragmentary Hebrew manuscripts.",
author = "Avi Shmidman and Ometz Shmidman and Hillel Gershuni and Moshe Koppel",
note = "Publisher Copyright: {\textcopyright} 2024 Association for Computational Linguistics.; 1st Workshop on Machine Learning for Ancient Languages, ML4AL 2024 ; Conference date: 15-08-2024",
year = "2024",
language = "الإنجليزيّة",
series = "ML4AL 2024 - 1st Workshop on Machine Learning for Ancient Languages, Proceedings of the Workshop",
publisher = "Association for Computational Linguistics (ACL)",
pages = "13--18",
editor = "John Pavlopoulos and Thea Sommerschield and Yannis Assael and Shai Gordin and Kyunghyun Cho and Marco Passarotti and Rachele Sprugnoli and Yudong Liu and Bin Li and Adam Anderson",
booktitle = "ML4AL 2024 - 1st Workshop on Machine Learning for Ancient Languages, Proceedings of the Workshop",
address = "الولايات المتّحدة",
}