@inproceedings{5ccd58c30e9848f9a06b387954d436dd,
  title     = {Learning Additive Noise Channels: Generalization Bounds and Algorithms},
  abstract  = {An additive noise channel is considered, in which the noise distribution is unknown and is not known to belong to any parametric family. The problem of designing a codebook and a generalized minimal distance decoder (which is parameterized by a covariance matrix) based on samples of the noise is considered. High probability generalization bounds for the error probability loss function, as well as for a hinge-type surrogate loss function are provided. A stochastic-gradient based alternating-minimization algorithm for the latter loss function is presented. Bounds on the average empirical error and generalization error are provided for a Gibbs based algorithm that gradually expurgates codewords from a large initial codebook to obtain a smaller codebook with improved error probability.},
  keywords  = {Gibbs algorithm, additive noise channel, expurgation, generalization bounds, statistical learning, stochastic gradient descent},
  author    = {Weinberger, Nir},
  note      = {Publisher Copyright: {\textcopyright} 2020 IEEE.; 2020 IEEE International Symposium on Information Theory, ISIT 2020 ; Conference date: 21-07-2020 Through 26-07-2020},
  year      = {2020},
  month     = jun,
  doi       = {10.1109/ISIT44484.2020.9174090},
  language  = {English},
  series    = {IEEE International Symposium on Information Theory - Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {2586--2591},
  booktitle = {2020 IEEE International Symposium on Information Theory, ISIT 2020 - Proceedings},
  address   = {United States},
}