@article{fong_backprop_2019,
title = {Backprop as {Functor}: {A} compositional perspective on supervised learning},
shorttitle = {Backprop as {Functor}},
url = {http://arxiv.org/abs/1711.10455},
abstract = {A supervised learning algorithm searches over a set of functions $A \to B$ parametrised by a space $P$ to find the best approximation to some ideal function $f\colon A \to B$. It does this by taking examples $(a,f(a)) \in A \times B$, and updating the parameter according to some rule. We define a category where these update rules may be composed, and show that gradient descent---with respect to a fixed step size and an error function satisfying a certain property---defines a monoidal functor from a category of parametrised functions to this category of update rules. This provides a structural perspective on backpropagation, as well as a broad generalisation of neural networks.},
urldate = {2019-11-23},
journal = {arXiv:1711.10455 [cs, math]},
author = {Fong, Brendan and Spivak, David I. and Tuyéras, Rémy},
month = may,
year = {2019},
note = {arXiv: 1711.10455},
keywords = {Categorical ML, Machine learning, Purely theoretical}
}