@Article{BR92,
  author    = {Avrim L. Blum and Ronald L. Rivest},
  title     = {Training a 3-node neural network is {NP}-complete},
  journal   = {Neural Networks},
  year      = {1992},
  month     = jan,
  volume    = {5},
  number    = {1},
  pages     = {117--127},
  doi       = {10.1016/S0893-6080(05)80010-3},
  publisher = {Elsevier (Pergamon Press)},
  keywords  = {neural networks, computational complexity, NP-completeness, intractability, learning, training, multilayer perceptron, representation},
  abstract  = {We consider a 2-layer, 3-node, $n$-input neural network whose nodes compute linear threshold functions of their inputs. We show that it is NP-complete to decide whether there exist weights and thresholds for this network so that it produces output consistent with a given set of training examples. We extend the result to other simple networks. We also present a network for which training is hard but where switching to a more powerful representation makes training easier. These results suggest that those looking for perfect training algorithms cannot escape inherent computational difficulties just by considering only simple or very regular networks. They also suggest the importance, given a training problem, of finding an appropriate network and input encoding for that problem. It is left as an open problem to extend our result to nodes with nonlinear functions such as sigmoids.}
}
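
Annotation (not part of the published record): the abstract describes a 2-layer, 3-node architecture whose nodes compute linear threshold functions, and an NP-complete decision problem over its weights and thresholds. The following is a minimal Python sketch of that architecture under assumed conventions (binary inputs, two hidden threshold units feeding one output threshold unit); all function names, weights, and thresholds are illustrative, not taken from the paper. Note the asymmetry it highlights: *checking* a candidate parameter setting against the examples is easy, while *finding* one is the problem shown to be NP-complete.

import numpy as np

def threshold(x, w, t):
    # Linear threshold function: output 1 if w . x >= t, else 0.
    return int(np.dot(w, x) >= t)

def three_node_net(x, w1, t1, w2, t2, w3, t3):
    # Forward pass of the assumed architecture: hidden units N1, N2 read
    # the n inputs; output unit N3 reads the pair of hidden activations.
    h = np.array([threshold(x, w1, t1), threshold(x, w2, t2)])
    return threshold(h, w3, t3)

def consistent(examples, params):
    # Verify a candidate setting of weights and thresholds against every
    # labeled example. This check is polynomial time; the paper's result
    # is that deciding whether ANY consistent setting exists is NP-complete.
    w1, t1, w2, t2, w3, t3 = params
    return all(three_node_net(np.asarray(x), w1, t1, w2, t2, w3, t3) == y
               for x, y in examples)

# Illustrative usage: with suitable (hypothetical) weights the 3-node net
# computes XOR on 2 inputs, which no single threshold unit can represent.
xor = [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]
params = (np.array([1, 1]), 1,     # N1: OR   (x1 + x2 >= 1)
          np.array([-1, -1]), -1,  # N2: NAND (-x1 - x2 >= -1)
          np.array([1, 1]), 2)     # N3: AND of the two hidden outputs
assert consistent(xor, params)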