@inproceedings{9df0e22fea9a4ece90d9761d551307ec,
title = "Ambient Diffusion: Learning Clean Distributions from Corrupted Data",
abstract = "We present the first diffusion-based framework that can learn an unknown distribution using only highly-corrupted samples. This problem arises in scientific applications where access to uncorrupted samples is impossible or expensive to acquire. Another benefit of our approach is the ability to train generative models that are less likely to memorize any individual training sample, since they never observe clean training data. Our main idea is to introduce additional measurement distortion during the diffusion process and require the model to predict the original corrupted image from the further corrupted image. We prove that our method leads to models that learn the conditional expectation of the full uncorrupted image given this additional measurement corruption. This holds for any corruption process that satisfies some technical conditions (and in particular includes inpainting and compressed sensing). We train models on standard benchmarks (CelebA, CIFAR-10 and AFHQ) and show that we can learn the distribution even when all the training samples have 90% of their pixels missing. We also show that we can finetune foundation models on small corrupted datasets (e.g. MRI scans with block corruptions) and learn the clean distribution without memorizing the training set.",
author = "Giannis Daras and Kulin Shah and Yuval Dagan and Aravind Gollakota and Alexandros G. Dimakis and Adam Klivans",
note = "37th Conference on Neural Information Processing Systems, NeurIPS 2023; Conference date: 10-12-2023 through 16-12-2023",
year = "2023",
language = "English",
series = "Advances in Neural Information Processing Systems",
editor = "A. Oh and T. Neumann and A. Globerson and K. Saenko and M. Hardt and S. Levine",
booktitle = "Advances in Neural Information Processing Systems 36 - 37th Conference on Neural Information Processing Systems, NeurIPS 2023",
}