Optimized Product Quantization for Approximate Nearest Neighbor Search. Ge, T., He, K., Ke, Q., & Sun, J. In 2013 IEEE Conference on Computer Vision and Pattern Recognition, pages 2946–2953, June, 2013. ISSN: 1063-6919. Abstract: Product quantization is an effective vector quantization approach to compactly encode high-dimensional vectors for fast approximate nearest neighbor (ANN) search. The essence of product quantization is to decompose the original high-dimensional space into the Cartesian product of a finite number of low-dimensional subspaces that are then quantized separately. Optimal space decomposition is important for the performance of ANN search, but still remains unaddressed. In this paper, we optimize product quantization by minimizing quantization distortions w.r.t. the space decomposition and the quantization codebooks. We present two novel methods for optimization: a non-parametric method that alternatively solves two smaller sub-problems, and a parametric method that is guaranteed to achieve the optimal solution if the input data follows some Gaussian distribution. We show by experiments that our optimized approach substantially improves the accuracy of product quantization for ANN search.
@inproceedings{ge_optimized_2013,
title = {Optimized {Product} {Quantization} for {Approximate} {Nearest} {Neighbor} {Search}},
doi = {10.1109/CVPR.2013.379},
abstract = {Product quantization is an effective vector quantization approach to compactly encode high-dimensional vectors for fast approximate nearest neighbor (ANN) search. The essence of product quantization is to decompose the original high-dimensional space into the Cartesian product of a finite number of low-dimensional subspaces that are then quantized separately. Optimal space decomposition is important for the performance of ANN search, but still remains unaddressed. In this paper, we optimize product quantization by minimizing quantization distortions w.r.t. the space decomposition and the quantization codebooks. We present two novel methods for optimization: a non-parametric method that alternatively solves two smaller sub-problems, and a parametric method that is guaranteed to achieve the optimal solution if the input data follows some Gaussian distribution. We show by experiments that our optimized approach substantially improves the accuracy of product quantization for ANN search.},
language = {en},
booktitle = {2013 {IEEE} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}},
author = {Ge, Tiezheng and He, Kaiming and Ke, Qifa and Sun, Jian},
month = jun,
year = {2013},
note = {ISSN: 1063-6919},
keywords = {\#CVPR{\textgreater}13, \#Optimization, /unread, Artificial neural networks, Eigenvalues and eigenfunctions, Encoding, Linear programming, Optimization, Quantization (signal), Vectors, nearest neighbor search, product quantization},
pages = {2946--2953},
}
Downloads: 0
{"_id":"src2HoKyrXqZCgYuJ","bibbaseid":"ge-he-ke-sun-optimizedproductquantizationforapproximatenearestneighborsearch-2013","author_short":["Ge, T.","He, K.","Ke, Q.","Sun, J."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"Optimized Product Quantization for Approximate Nearest Neighbor Search","doi":"10.1109/CVPR.2013.379","abstract":"Product quantization is an effective vector quantization approach to compactly encode high-dimensional vectors for fast approximate nearest neighbor (ANN) search. The essence of product quantization is to decompose the original high-dimensional space into the Cartesian product of a finite number of low-dimensional subspaces that are then quantized separately. Optimal space decomposition is important for the performance of ANN search, but still remains unaddressed. In this paper, we optimize product quantization by minimizing quantization distortions w.r.t. the space decomposition and the quantization codebooks. We present two novel methods for optimization: a non-parametric method that alternatively solves two smaller sub-problems, and a parametric method that is guaranteed to achieve the optimal solution if the input data follows some Gaussian distribution. We show by experiments that our optimized approach substantially improves the accuracy of product quantization for ANN search.","language":"en","booktitle":"2013 IEEE Conference on Computer Vision and Pattern Recognition","author":[{"propositions":[],"lastnames":["Ge"],"firstnames":["Tiezheng"],"suffixes":[]},{"propositions":[],"lastnames":["He"],"firstnames":["Kaiming"],"suffixes":[]},{"propositions":[],"lastnames":["Ke"],"firstnames":["Qifa"],"suffixes":[]},{"propositions":[],"lastnames":["Sun"],"firstnames":["Jian"],"suffixes":[]}],"month":"June","year":"2013","note":"ISSN: 1063-6919","keywords":"#CVPR\\textgreater13, #Optimization, /unread, Artificial neural networks, Eigenvalues and eigenfunctions, Encoding, Linear programming, Optimization, Quantization (signal), Vectors, nearest neighbor search, product quantization","pages":"2946–2953","bibtex":"@inproceedings{ge_optimized_2013,\n\ttitle = {Optimized {Product} {Quantization} for {Approximate} {Nearest} {Neighbor} {Search}},\n\tdoi = {10.1109/CVPR.2013.379},\n\tabstract = {Product quantization is an effective vector quantization approach to compactly encode high-dimensional vectors for fast approximate nearest neighbor (ANN) search. The essence of product quantization is to decompose the original high-dimensional space into the Cartesian product of a finite number of low-dimensional subspaces that are then quantized separately. Optimal space decomposition is important for the performance of ANN search, but still remains unaddressed. In this paper, we optimize product quantization by minimizing quantization distortions w.r.t. the space decomposition and the quantization codebooks. We present two novel methods for optimization: a non-parametric method that alternatively solves two smaller sub-problems, and a parametric method that is guaranteed to achieve the optimal solution if the input data follows some Gaussian distribution. We show by experiments that our optimized approach substantially improves the accuracy of product quantization for ANN search.},\n\tlanguage = {en},\n\tbooktitle = {2013 {IEEE} {Conference} on {Computer} {Vision} and {Pattern} {Recognition}},\n\tauthor = {Ge, Tiezheng and He, Kaiming and Ke, Qifa and Sun, Jian},\n\tmonth = jun,\n\tyear = {2013},\n\tnote = {ISSN: 1063-6919},\n\tkeywords = {\\#CVPR{\\textgreater}13, \\#Optimization, /unread, Artificial neural networks, Eigenvalues and eigenfunctions, Encoding, Linear programming, Optimization, Quantization (signal), Vectors, nearest neighbor search, product quantization},\n\tpages = {2946--2953},\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n","author_short":["Ge, T.","He, K.","Ke, Q.","Sun, J."],"key":"ge_optimized_2013","id":"ge_optimized_2013","bibbaseid":"ge-he-ke-sun-optimizedproductquantizationforapproximatenearestneighborsearch-2013","role":"author","urls":{},"keyword":["#CVPR\\textgreater13","#Optimization","/unread","Artificial neural networks","Eigenvalues and eigenfunctions","Encoding","Linear programming","Optimization","Quantization (signal)","Vectors","nearest neighbor search","product quantization"],"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero/zzhenry2012","dataSources":["nZHrFJKyxKKDaWYM8"],"keywords":["#cvpr\\textgreater13","#optimization","/unread","artificial neural networks","eigenvalues and eigenfunctions","encoding","linear programming","optimization","quantization (signal)","vectors","nearest neighbor search","product quantization"],"search_terms":["optimized","product","quantization","approximate","nearest","neighbor","search","ge","he","ke","sun"],"title":"Optimized Product Quantization for Approximate Nearest Neighbor Search","year":2013}