From L1 Minimization to Entropy Minimization: A Novel Approach for Sparse Signal Recovery in Compressive Sensing. Conde, M. H. & Loffeld, O. In 2018 26th European Signal Processing Conference (EUSIPCO), pages 568-572, Sep., 2018. Paper doi abstract bibtex The groundbreaking theory of compressive sensing (CS) enables reconstructing many common classes of real-world signals from a number of samples that is well below that prescribed by the Shannon sampling theorem, which exclusively relates to the bandwidth of the signal. Differently, CS takes profit of the sparsity or compressibility of the signals in an appropriate basis to reconstruct them from few measurements. A large number of algorithms exist for solving the sparse recovery problem, which can be roughly classified in greedy pursuits and l1 minimization algorithms. Chambolle and Pock's (C&P) primal-dual l1 minimization algorithm has shown to deliver state-of-the-art results with optimal convergence rate. In this work we present an algorithm for l1 minimization that operates in the null space of the measurement matrix and follows a Nesterov-accelerated gradient descent structure. Restriction to the null space allows the algorithm to operate in a minimal-dimension subspace. A further novelty lies on the fact that the cost function is no longer the l1 norm of the temporal solution, but a weighted sum of its entropy and its l1 norm. The inclusion of the entropy pushes the l1 minimization towards a de facto quasi-l0 minimization, while the l1 norm term avoids divergence. Our algorithm globally outperforms C&P and other recent approaches for l1 minimization in terms of l2 reconstruction error, including a different entropy-based method.
@InProceedings{8553245,
  author    = {Conde, M. H. and Loffeld, O.},
  booktitle = {2018 26th European Signal Processing Conference (EUSIPCO)},
  title     = {From {L1} Minimization to Entropy Minimization: A Novel Approach for Sparse Signal Recovery in Compressive Sensing},
  year      = {2018},
  pages     = {568--572},
  abstract  = {The groundbreaking theory of compressive sensing (CS) enables reconstructing many common classes of real-world signals from a number of samples that is well below that prescribed by the Shannon sampling theorem, which exclusively relates to the bandwidth of the signal. Differently, CS takes profit of the sparsity or compressibility of the signals in an appropriate basis to reconstruct them from few measurements. A large number of algorithms exist for solving the sparse recovery problem, which can be roughly classified in greedy pursuits and l1 minimization algorithms. Chambolle and Pock's (C\&P) primal-dual l1 minimization algorithm has shown to deliver state-of-the-art results with optimal convergence rate. In this work we present an algorithm for l1 minimization that operates in the null space of the measurement matrix and follows a Nesterov-accelerated gradient descent structure. Restriction to the null space allows the algorithm to operate in a minimal-dimension subspace. A further novelty lies on the fact that the cost function is no longer the l1 norm of the temporal solution, but a weighted sum of its entropy and its l1 norm. The inclusion of the entropy pushes the l1 minimization towards a de facto quasi-l0 minimization, while the l1 norm term avoids divergence. Our algorithm globally outperforms C\&P and other recent approaches for l1 minimization in terms of l2 reconstruction error, including a different entropy-based method.},
  keywords  = {compressed sensing;convergence of numerical methods;entropy;gradient methods;greedy algorithms;matrix algebra;minimisation;signal reconstruction;signal sampling;entropy minimization;sparse signal recovery;compressive sensing;CS;sparse recovery problem;minimal-dimension subspace;Shannon sampling theorem;Chambolle and Pock's primal-dual L1 minimization algorithm;C and P;optimal convergence rate;Nesterov-accelerated gradient descent structure;greedy pursuits;Minimization;Entropy;Signal processing algorithms;Kalman filters;Null space;Sensors;Matching pursuit algorithms},
  doi       = {10.23919/EUSIPCO.2018.8553245},
  issn      = {2076-1465},
  month     = sep,
  url       = {https://www.eurasip.org/proceedings/eusipco/eusipco2018/papers/1570437258.pdf},
}
Downloads: 0
{"_id":"GssMp3xhRWYmpvyYv","bibbaseid":"conde-loffeld-froml1minimizationtoentropyminimizationanovelapproachforsparsesignalrecoveryincompressivesensing-2018","authorIDs":[],"author_short":["Conde, M. H.","Loffeld, O."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"firstnames":["M.","H."],"propositions":[],"lastnames":["Conde"],"suffixes":[]},{"firstnames":["O."],"propositions":[],"lastnames":["Loffeld"],"suffixes":[]}],"booktitle":"2018 26th European Signal Processing Conference (EUSIPCO)","title":"From L1 Minimization to Entropy Minimization: A Novel Approach for Sparse Signal Recovery in Compressive Sensing","year":"2018","pages":"568-572","abstract":"The groundbreaking theory of compressive sensing (CS) enables reconstructing many common classes or real-world signals from a number of samples that is well below that prescribed by the Shannon sampling theorem, which exclusively relates to the bandwidth of the signal. Differently, CS takes profit of the sparsity or compressibility of the signals in an appropriate basis to reconstruct them from few measurements. A large number of algorithms exist for solving the sparse recovery problem, which can be roughly classified in greedy pursuits and l1 minimization algorithms. Chambolle and Pock's (C&P) primal-dual l1minimization algorithm has shown to deliver state-of-the-art results with optimal convergence rate. In this work we present an algorithm for l1 minimization that operates in the null space of the measurement matrix and follows a Nesterov-accelerated gradient descent structure. Restriction to the null space allows the algorithm to operate in a minimal-dimension subspace. A further novelty lies on the fact that the cost function is no longer the l1 norm of the temporal solution, but a weighted sum of its entropy and its l1 norm. The inclusion of the entropy pushes the l1 minimization towards a de facto quasi-10 minimization, while the l1 norm term avoids divergence. 
Our algorithm globally outperforms C&P and other recent approaches for l1 minimization in terms of l2reconstruction error, including a different entropy-based method.","keywords":"compressed sensing;convergence of numerical methods;entropy;gradient methods;greedy algorithms;matrix algebra;minimisation;signal reconstruction;signal sampling;entropy minimization;sparse signal recovery;compressive sensing;CS;sparse recovery problem;minimal-dimension subspace;Shannon sampling theorem;Chambolle and Pock's primal-dual L1 minimization algorithm;C and P;optimal convergence rate;Nesterov-accelerated gradient descent structure;greedy pursuits;Minimization;Entropy;Signal processing algorithms;Kalman filters;Null space;Sensors;Matching pursuit algorithms","doi":"10.23919/EUSIPCO.2018.8553245","issn":"2076-1465","month":"Sep.","url":"https://www.eurasip.org/proceedings/eusipco/eusipco2018/papers/1570437258.pdf","bibtex":"@InProceedings{8553245,\n author = {M. H. Conde and O. Loffeld},\n booktitle = {2018 26th European Signal Processing Conference (EUSIPCO)},\n title = {From L1 Minimization to Entropy Minimization: A Novel Approach for Sparse Signal Recovery in Compressive Sensing},\n year = {2018},\n pages = {568-572},\n abstract = {The groundbreaking theory of compressive sensing (CS) enables reconstructing many common classes or real-world signals from a number of samples that is well below that prescribed by the Shannon sampling theorem, which exclusively relates to the bandwidth of the signal. Differently, CS takes profit of the sparsity or compressibility of the signals in an appropriate basis to reconstruct them from few measurements. A large number of algorithms exist for solving the sparse recovery problem, which can be roughly classified in greedy pursuits and l1 minimization algorithms. Chambolle and Pock's (C&P) primal-dual l1minimization algorithm has shown to deliver state-of-the-art results with optimal convergence rate. 
In this work we present an algorithm for l1 minimization that operates in the null space of the measurement matrix and follows a Nesterov-accelerated gradient descent structure. Restriction to the null space allows the algorithm to operate in a minimal-dimension subspace. A further novelty lies on the fact that the cost function is no longer the l1 norm of the temporal solution, but a weighted sum of its entropy and its l1 norm. The inclusion of the entropy pushes the l1 minimization towards a de facto quasi-10 minimization, while the l1 norm term avoids divergence. Our algorithm globally outperforms C&P and other recent approaches for l1 minimization in terms of l2reconstruction error, including a different entropy-based method.},\n keywords = {compressed sensing;convergence of numerical methods;entropy;gradient methods;greedy algorithms;matrix algebra;minimisation;signal reconstruction;signal sampling;entropy minimization;sparse signal recovery;compressive sensing;CS;sparse recovery problem;minimal-dimension subspace;Shannon sampling theorem;Chambolle and Pock's primal-dual L1 minimization algorithm;C and P;optimal convergence rate;Nesterov-accelerated gradient descent structure;greedy pursuits;Minimization;Entropy;Signal processing algorithms;Kalman filters;Null space;Sensors;Matching pursuit algorithms},\n doi = {10.23919/EUSIPCO.2018.8553245},\n issn = {2076-1465},\n month = {Sep.},\n url = {https://www.eurasip.org/proceedings/eusipco/eusipco2018/papers/1570437258.pdf},\n}\n\n","author_short":["Conde, M. 
H.","Loffeld, O."],"key":"8553245","id":"8553245","bibbaseid":"conde-loffeld-froml1minimizationtoentropyminimizationanovelapproachforsparsesignalrecoveryincompressivesensing-2018","role":"author","urls":{"Paper":"https://www.eurasip.org/proceedings/eusipco/eusipco2018/papers/1570437258.pdf"},"keyword":["compressed sensing;convergence of numerical methods;entropy;gradient methods;greedy algorithms;matrix algebra;minimisation;signal reconstruction;signal sampling;entropy minimization;sparse signal recovery;compressive sensing;CS;sparse recovery problem;minimal-dimension subspace;Shannon sampling theorem;Chambolle and Pock's primal-dual L1 minimization algorithm;C and P;optimal convergence rate;Nesterov-accelerated gradient descent structure;greedy pursuits;Minimization;Entropy;Signal processing algorithms;Kalman filters;Null space;Sensors;Matching pursuit algorithms"],"metadata":{"authorlinks":{}},"downloads":0},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/Roznn/EUSIPCO/main/eusipco2018url.bib","creationDate":"2021-02-13T15:38:40.349Z","downloads":0,"keywords":["compressed sensing;convergence of numerical methods;entropy;gradient methods;greedy algorithms;matrix algebra;minimisation;signal reconstruction;signal sampling;entropy minimization;sparse signal recovery;compressive sensing;cs;sparse recovery problem;minimal-dimension subspace;shannon sampling theorem;chambolle and pock's primal-dual l1 minimization algorithm;c and p;optimal convergence rate;nesterov-accelerated gradient descent structure;greedy pursuits;minimization;entropy;signal processing algorithms;kalman filters;null space;sensors;matching pursuit algorithms"],"search_terms":["minimization","entropy","minimization","novel","approach","sparse","signal","recovery","compressive","sensing","conde","loffeld"],"title":"From L1 Minimization to Entropy Minimization: A Novel Approach for Sparse Signal Recovery in Compressive 
Sensing","year":2018,"dataSources":["yiZioZximP7hphDpY","iuBeKSmaES2fHcEE9"]}