@inproceedings{2136c8ed02184d0790f114e27bba678d,
  title     = {Can Stochastic Gradient {Langevin} Dynamics Provide Differential Privacy for Deep Learning?},
  author    = {Heller, Guy and Fetaya, Ethan},
  booktitle = {Proceedings - 2023 {IEEE} Conference on Secure and Trustworthy Machine Learning, {SaTML} 2023},
  series    = {Proceedings - 2023 {IEEE} Conference on Secure and Trustworthy Machine Learning, {SaTML} 2023},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  address   = {United States},
  year      = {2023},
  pages     = {68--106},
  doi       = {10.1109/satml54575.2023.00015},
  language  = {English},
  keywords  = {Bayesian Inference, Deep Learning, Differential Privacy, Stochastic Gradient Langevin Dynamics},
  abstract  = {Bayesian learning via Stochastic Gradient Langevin Dynamics (SGLD) has been suggested for differentially private learning. While previous research provides differential privacy bounds for SGLD at the initial steps of the algorithm or when close to convergence, the question of what differential privacy guarantees can be made in between remains unanswered. This interim region is of great importance, especially for Bayesian neural networks, as it is hard to guarantee convergence to the posterior. This paper shows that using SGLD might result in unbounded privacy loss for this interim region, even when sampling from the posterior is as differentially private as desired.},
  note      = {Publisher Copyright: {\textcopyright} 2023 IEEE.; 2023 IEEE Conference on Secure and Trustworthy Machine Learning, SaTML 2023 ; Conference date: 08-02-2023 Through 10-02-2023},
}