@inproceedings{63f97447d4fe4c2f9e76932da73368cd,
title = "Meta-learning by adjusting priors based on extended {PAC-Bayes} theory",
abstract = "In meta-learning, an agent extracts knowledge from observed tasks, aiming to facilitate learning of novel future tasks. Under the assumption that future tasks are 'related' to previous tasks, the accumulated knowledge should be learned in a way which captures the common structure across learned tasks, while allowing the learner sufficient flexibility to adapt to novel aspects of new tasks. We present a framework for meta-learning that is based on generalization error bounds, allowing us to extend various PAC-Bayes bounds to meta-learning. Learning takes place through the construction of a distribution over hypotheses based on the observed tasks, which is then utilized for learning a new task. Thus, prior knowledge is incorporated through setting an experience-dependent prior for novel tasks. We develop a gradient-based algorithm which minimizes an objective function derived from the bounds and demonstrate its effectiveness numerically with deep neural networks. In addition to establishing the improved performance available through meta-learning, we demonstrate the intuitive way by which prior information is manifested at different levels of the network.",
author = "Ron Amit and Ron Meir",
note = "35th International Conference on Machine Learning, ICML 2018; Conference date: 10--15 July 2018",
year = "2018",
language = "English",
pages = "310--326",
editor = "Andreas Krause and Jennifer Dy",
booktitle = "35th International Conference on Machine Learning, ICML 2018",
}