@comment{Cleaned auto-exported entry (WACV 2022 paper by Amosy & Chechik):
  translated mis-localized Arabic values in language/address back to English,
  normalized authors to "Last, First", switched field delimiters to braces,
  fixed the broken {"}valleys{"} quoting in the abstract, added month from the
  conference date in the note, and dropped the series field that exactly
  duplicated booktitle. Citation key left unchanged so existing \cite's work.}
@inproceedings{e50c70ba33ca4e6ea7c8fc5b491ca47d,
  title     = {Coupled Training for Multi-Source Domain Adaptation},
  abstract  = {Unsupervised domain adaptation is often addressed by learning a joint representation of labeled samples from a source domain and unlabeled samples from a target domain. Unfortunately, hard sharing of representation may hurt adaptation because of negative transfer, where features that are useful for source domains are learned even if they hurt inference on the target domain. Here, we propose an alternative, soft sharing scheme. We train separate but weakly-coupled models for the source and the target data, while encouraging their predictions to agree. Training the two coupled models jointly effectively exploits the distribution over unlabeled target data and achieves high accuracy on the target. Specifically, we show analytically and empirically that the decision boundaries of the target model converge to low-density ``valleys'' of the target distribution. We evaluate our approach on four multi-source domain adaptation (MSDA) benchmarks, digits, amazon text reviews, Office-Caltech and images (DomainNet). We find that it consistently outperforms current MSDA SoTA, sometimes by a very large margin.},
  keywords  = {Few-shot, Semi- and Un- supervised Learning, Transfer},
  author    = {Amosy, Ohad and Chechik, Gal},
  note      = {Publisher Copyright: {\textcopyright} 2022 IEEE.; 22nd IEEE/CVF Winter Conference on Applications of Computer Vision, WACV 2022 ; Conference date: 04-01-2022 Through 08-01-2022},
  year      = {2022},
  month     = jan,
  doi       = {10.1109/wacv51458.2022.00114},
  language  = {English},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {1071--1080},
  booktitle = {Proceedings - 2022 IEEE/CVF Winter Conference on Applications of Computer Vision, WACV 2022},
  address   = {United States},
}