@article{2923,
  title         = {Machine learning outperforms thermodynamics in measuring how well a many-body system learns a drive},
  author        = {Zhong, Weishun and Gold, Jacob M. and Marzen, Sarah and England, Jeremy L. and Yunger Halpern, Nicole},
  journal       = {Scientific Reports},
  volume        = {11},
  year          = {2021},
  month         = oct,
  issn          = {2045-2322},
  doi           = {10.1038/s41598-021-88311-7},
  url           = {https://arxiv.org/abs/2004.03604},
  eprint        = {2004.03604},
  archiveprefix = {arXiv},
  abstract      = {Diverse many-body systems, from soap bubbles to suspensions to polymers, learn and remember patterns in the drives that push them far from equilibrium. This learning may be leveraged for computation, memory, and engineering. Until now, many-body learning has been detected with thermodynamic properties, such as work absorption and strain. We progress beyond these macroscopic properties first defined for equilibrium contexts: We quantify statistical mechanical learning using representation learning, a machine-learning model in which information squeezes through a bottleneck. By calculating properties of the bottleneck, we measure four facets of many-body systems' learning: classification ability, memory capacity, discrimination ability, and novelty detection. Numerical simulations of a classical spin glass illustrate our technique. This toolkit exposes self-organization that eludes detection by thermodynamic measures: Our toolkit more reliably and more precisely detects and quantifies learning by matter while providing a unifying framework for many-body learning.},
}