@article{LMR91,
  author    = {Linial, Nathan and Mansour, Yishay and Rivest, Ronald L.},
  title     = {Results on Learnability and the {Vapnik-Chervonenkis} Dimension},
  journal   = {Information and Computation},
  publisher = {Academic Press},
  issn      = {0890-5401},
  date      = {1991-01},
  volume    = {90},
  number    = {1},
  pages     = {33--49},
  doi       = {10.1016/0890-5401(91)90058-A},
  url       = {http://www.sciencedirect.com/science/article/pii/089054019190058A},
  abstract  = {We consider the problem of learning a concept from examples in the distribution-free model by Valiant. (An essentially equivalent model, if one ignores issues of computational difficulty, was studied by Vapnik and Chervonenkis.) We introduce the notion of \emph{dynamic sampling}, wherein the number of examples examined may increase with the complexity of the target concept. This method is used to establish the learnability of various concept classes with an \emph{infinite} Vapnik-Chervonenkis dimension. We also discuss an important variation on the problem of learning from examples, called \emph{approximating from examples}. Here we do not assume that the target concept $T$ is a member of the concept class $\mathcal{C}$ from which approximations are chosen. This problem takes on particular interest when the VC dimension of $\mathcal{C}$ is infinite. Finally, we discuss the problem of computing the VC dimension of a finite concept set defined on a finite domain and consider the structure of classes of a fixed small dimension.},
}