% Cleaned auto-export (Scopus/Pure): braced field values, normalized all author
% names to unambiguous "Last, First" form, and fixed the language field, which
% contained the Arabic word for "English" instead of the English value.
@inproceedings{7d4f0429cfbb45b58b0fc3c6b138edbf,
  title     = {Soft-Sign Stochastic Gradient Descent Algorithm for Wireless Federated Learning},
  abstract  = {Federated learning over wireless networks requires aggregating locally computed gradients at a server where the mobile devices send statistically distinct gradient information over heterogenous communication links. This paper proposes a Bayesian approach for wireless federated learning referred to as soft-sign stochastic gradient descent (soft-signSGD). The idea of soft-signSGD is to aggregate the one-bit quantized local gradients at the server by jointly exploiting i) the prior distributions of the local gradients, ii) the gradient quantizer function, and iii) channel distributions. This aggregation method is optimal in the sense of minimizing the mean-squared error (MSE) under a simplified Gaussian prior assumption on the local gradient. From simulations, we demonstrate that soft-signSGD considerably outperforms the conventional sign stochastic gradient descent algorithm when training and testing neural networks using the MNIST dataset and the CIFAR-10 dataset over heterogeneous wireless networks.},
  author    = {Lee, Seunghoon and Park, Chanho and Hong, Songnam and Eldar, Yonina C. and Lee, Namyoon},
  note      = {Publisher Copyright: {\textcopyright} 2021 IEEE.; IEEE 22nd International Workshop on Signal Processing Advances in Wireless Communications (SPAWC) ; Conference date: 27-09-2021 Through 30-09-2021},
  year      = {2021},
  month     = nov,
  day       = {15},
  doi       = {10.1109/SPAWC51858.2021.9593212},
  language  = {English},
  isbn      = {978-1-6654-2852-1},
  series    = {2021-September},
  pages     = {241--245},
  booktitle = {2021 IEEE 22nd International Workshop on Signal Processing Advances in Wireless Communications (SPAWC)},
}