Large Margin Rank Boundaries for Ordinal Regression. Herbrich, R., Graepel, T., & Obermayer, K. In Advances in Large Margin Classifiers, 7, pages 115--132. The MIT Press, 1999.
Abstract: In contrast to the standard machine learning tasks of classification and metric regression we investigate the problem of predicting variables of ordinal scale, a setting referred to as ordinal regression. This problem arises frequently in the social sciences and in information retrieval where human preferences play a major role. Whilst approaches proposed in statistics rely on a probability model of a latent (unobserved) variable we present a distribution independent risk formulation of ordinal regression which allows us to derive a uniform convergence bound. Applying this bound we present a large margin algorithm that is based on a mapping from objects to scalar utility values thus classifying pairs of objects. We give experimental results for an information retrieval task which show that our algorithm outperforms more naive approaches to ordinal regression such as Support Vector Classification and Support Vector Regression in the case of more than two ranks.
@incollection{Herbrich1999d,
  abstract  = {In contrast to the standard machine learning tasks of classification and metric regression we investigate the problem of predicting variables of ordinal scale, a setting referred to as ordinal regression. This problem arises frequently in the social sciences and in information retrieval where human preferences play a major role. Whilst approaches proposed in statistics rely on a probability model of a latent (unobserved) variable we present a distribution independent risk formulation of ordinal regression which allows us to derive a uniform convergence bound. Applying this bound we present a large margin algorithm that is based on a mapping from objects to scalar utility values thus classifying pairs of objects. We give experimental results for an information retrieval task which show that our algorithm outperforms more naive approaches to ordinal regression such as Support Vector Classification and Support Vector Regression in the case of more than two ranks.},
  author    = {Herbrich, Ralf and Graepel, Thore and Obermayer, Klaus},
  editor    = {Smola, Alexander J. and Bartlett, Peter L. and Sch{\"o}lkopf, Bernhard and Schuurmans, Dale},
  booktitle = {Advances in Large Margin Classifiers},
  chapter   = {7},
  pages     = {115--132},
  publisher = {The MIT Press},
  address   = {Cambridge, MA},
  title     = {Large Margin Rank Boundaries for Ordinal Regression},
  url       = {http://www.herbrich.me/papers/nips98\_ordinal.pdf},
  year      = {1999}
}
Downloads: 22
{"_id":{"_str":"53421b61ecd21cdc070003fe"},"__v":2,"authorIDs":["5456e9a38b01c8193000005e"],"author_short":["Herbrich, R.","Graepel, T.","Obermayer, K."],"bibbaseid":"herbrich-graepel-obermayer-largemarginrankboundariesforordinalregression-1999","bibdata":{"bibtype":"incollection","type":"incollection","abstract":"In contrast to the standard machine learning tasks of classification and metric regression we investigate the problem of predicting variables of ordinal scale, a setting referred to as ordinal regression. This problem arises frequently in the social sciences and in information retrieval where human preferences play a major role. Whilst approaches proposed in statistics rely on a probability model of a latent (unobserved) variable we present a distribution independent risk formulation of ordinal regression which allows us to derive a uniform convergence bound. Applying this bound we present a large margin algorithm that is based on a mapping from objects to scalar utility values thus classifying pairs of objects. 
We give experimental results for an information retrieval task which show that our algorithm outperforms more naive approaches to ordinal regression such as Support Vector Classification and Support Vector Regression in the case of more than two ranks.","author":[{"propositions":[],"lastnames":["Herbrich"],"firstnames":["Ralf"],"suffixes":[]},{"propositions":[],"lastnames":["Graepel"],"firstnames":["Thore"],"suffixes":[]},{"propositions":[],"lastnames":["Obermayer"],"firstnames":["Klause"],"suffixes":[]}],"booktitle":"Advances in Large Margin Classifiers","chapter":"7","file":":Users/rherb/Code/herbrich.me/papers/nips98_ordinal.pdf:pdf","pages":"115--132","publisher":"The MIT Press","title":"Large Margin Rank Boundaries for Ordinal Regression","url":"http://www.herbrich.me/papers/nips98_ordinal.pdf","year":"1999","bibtex":"@incollection{Herbrich1999d,\nabstract = {In contrast to the standard machine learning tasks of classification and metric regression we investigate the problem of predicting variables of ordinal scale, a setting referred to as ordinal regression. This problem arises frequently in the social sciences and in information retrieval where human preferences play a major role. Whilst approaches proposed in statistics rely on a probability model of a latent (unobserved) variable we present a distribution independent risk formulation of ordinal regression which allows us to derive a uniform convergence bound. Applying this bound we present a large margin algorithm that is based on a mapping from objects to scalar utility values thus classifying pairs of objects. 
We give experimental results for an information retrieval task which show that our algorithm outperforms more naive approaches to ordinal regression such as Support Vector Classification and Support Vector Regression in the case of more than two ranks.},\nauthor = {Herbrich, Ralf and Graepel, Thore and Obermayer, Klause},\nbooktitle = {Advances in Large Margin Classifiers},\nchapter = {7},\nfile = {:Users/rherb/Code/herbrich.me/papers/nips98\\_ordinal.pdf:pdf},\npages = {115--132},\npublisher = {The MIT Press},\ntitle = {{Large Margin Rank Boundaries for Ordinal Regression}},\nurl = {http://www.herbrich.me/papers/nips98\\_ordinal.pdf},\nyear = {1999}\n}\n","author_short":["Herbrich, R.","Graepel, T.","Obermayer, K."],"key":"Herbrich1999d","id":"Herbrich1999d","bibbaseid":"herbrich-graepel-obermayer-largemarginrankboundariesforordinalregression-1999","role":"author","urls":{"Paper":"http://www.herbrich.me/papers/nips98_ordinal.pdf"},"downloads":22,"html":""},"bibtype":"incollection","biburl":"http://herbrich.me/bib/herbrich.bib","downloads":22,"keywords":[],"search_terms":["large","margin","rank","boundaries","ordinal","regression","herbrich","graepel","obermayer"],"title":"Large Margin Rank Boundaries for Ordinal Regression","year":1999,"dataSources":["y2DvMgAcqeDpXQ6ds"]}