@article {2936, title = {Generalization in quantum machine learning from few training data}, year = {2021}, month = {11/9/2021}, abstract = {
Modern quantum machine learning (QML) methods involve variationally optimizing a parameterized quantum circuit on a training data set, and subsequently making predictions on a testing data set (i.e., generalizing). In this work, we provide a comprehensive study of generalization performance in QML after training on a limited number N of training data points. We show that the generalization error of a quantum machine learning model with T trainable gates scales at worst as $\sqrt{T/N}$. When only K<