@book{1385552,
  recid     = {1385552},
  editor    = {Shakhnarovich, Gregory and Darrell, Trevor and Indyk, Piotr},
  title     = {Nearest-neighbor methods in learning and vision: theory and practice},
  publisher = {MIT Press},
  address   = {Cambridge, Mass.},
  pages     = {1 online resource (vi, 252 pages)},
  year      = {2005},
  note      = {"... held in Whistler, British Columbia ... annual conference on Neural Information Processing Systems (NIPS) in December 2003"---Preface.},
  abstract  = {Regression and classification methods based on similarity of the input to stored examples have not been widely used in applications involving very large sets of high-dimensional data. Recent advances in computational geometry and machine learning, however, may alleviate the problems of using these methods on large data sets. This volume presents theoretical and practical discussions of nearest-neighbor (NN) methods in machine learning and examines computer vision as an application domain in which the benefit of these advanced methods is often dramatic. It brings together contributions from researchers in theory of computation, machine learning, and computer vision with the goals of bridging the gaps between disciplines and presenting state-of-the-art methods for emerging applications. The contributors focus on the importance of designing algorithms for NN search, and for the related classification, regression, and retrieval tasks, that remain efficient even as the number of points or the dimensionality of the data grows very large. The book begins with two theoretical chapters on computational geometry and then explores ways to make the NN approach practicable in machine learning applications where the dimensionality of the data and the size of the data sets make naive methods for NN search prohibitively expensive. The final chapters describe successful applications of an NN algorithm, locality-sensitive hashing (LSH), to vision tasks.},
  url       = {http://library.usi.edu/record/1385552},
}