Applications of Projected Belief Networks (PBN). Baggenstoss, P. M. In *2019 27th European Signal Processing Conference (EUSIPCO)*, pages 1–5, September 2019.

Paper doi abstract bibtex

Paper doi abstract bibtex

The projected belief network (PBN) is a layered generative network with a tractable likelihood function (LF) that can be trained by gradient ascent as a probability density function (PDF) estimator and classifier. The PBN is derived from a feed-forward neural network (FF-NN) by finding the generative network that implements the probability distribution with maximum entropy (MaxEnt) consistent with the knowledge of the distribution at the output of the FF-NN. The FF-NN, from which the PBN is derived, is a complementary feature extractor that exactly recovers the PBN's hidden variables. This paper presents a multi-layer PBN and a deterministic PBN that are tested using a subset of the MNIST data set. When the deterministic PBN is combined with the dual FF-NN, it forms an auto-encoder that achieves much lower reconstruction error on testing data than the equivalent conventional network and functions significantly better as a classifier.

@inproceedings{8902708,
  author    = {Baggenstoss, P. M.},
  booktitle = {2019 27th European Signal Processing Conference ({EUSIPCO})},
  title     = {Applications of {Projected Belief Networks} ({PBN})},
  year      = {2019},
  pages     = {1--5},
  abstract  = {The projected belief network (PBN) is a layered generative network, with tractable likelihood function (LF) that can be trained by gradient ascent as a probability density function (PDF) estimator and classifier. The PBN is derived from a feed-forward neural network (FF-NN) by finding the generative network that implements the probability distribution with maximum entropy (MaxEnt) consistent with the knowledge of the distribution at the output of the FF-NN. The FF-NN, from which the PBN is derived, is a complementary feature extractor that exactly recovers the PBN's hidden variables. This paper presents a multi-layer PBN and a deterministic PBN that are tested using a subset of MNIST data set. When the deterministic PBN is combined with the dual FF-NN, it forms an auto-encoder that achieves much lower reconstruction error on testing data than the equivalent conventional network and functions significantly better as a classifier.},
  keywords  = {belief networks;feature extraction;feedforward neural nets;gradient methods;learning (artificial intelligence);maximum entropy methods;maximum likelihood estimation;probability;projected belief networks;layered generative network;tractable likelihood function;probability density function estimator;feed-forward neural network;probability distribution;PBN's hidden variables;multilayer PBN;deterministic PBN;dual FF-NN;equivalent conventional network;maximum entropy;MNIST data set;Neural networks;Feature extraction;Data models;Europe;Signal processing;Entropy;Stochastic processes},
  doi       = {10.23919/EUSIPCO.2019.8902708},
  issn      = {2076-1465},
  month     = sep,
  url       = {https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529856.pdf},
}

Downloads: 0

{"_id":"rxxPgAQpqyLFpRzJJ","bibbaseid":"baggenstoss-applicationsofprojectedbeliefnetworkspbn-2019","authorIDs":[],"author_short":["Baggenstoss, P. M."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"firstnames":["P.","M."],"propositions":[],"lastnames":["Baggenstoss"],"suffixes":[]}],"booktitle":"2019 27th European Signal Processing Conference (EUSIPCO)","title":"Applications of Projected Belief Networks (PBN)","year":"2019","pages":"1-5","abstract":"The projected belief network (PBN) is a layered generative network, with tractable likelihood function (LF) that can be trained by gradient ascent as a probability density function (PDF) estimator and classifier. The PBN is derived from a feed-forward neural network (FF-NN) by finding the generative network that implements the probability distribution with maximum entropy (MaxEnt) consistent with the knowledge of the distribution at the output of the FF-NN. The FF-NN, from which the PBN is derived, is a complementary feature extractor that exactly recovers the PBN's hidden variables. This paper presents a multi-layer PBN and a deterministic PBN that are tested using a subset of MNIST data set. 
When the deterministic PBN is combined with the dual FF-NN, it forms an auto-encoder that achieves much lower reconstruction error on testing data than the equivalent conventional network and functions significantly better as a classifier.","keywords":"belief networks;feature extraction;feedforward neural nets;gradient methods;learning (artificial intelligence);maximum entropy methods;maximum likelihood estimation;probability;projected belief networks;layered generative network;tractable likelihood function;probability density function estimator;feed-forward neural network;probability distribution;PBN's hidden variables;multilayer PBN;deterministic PBN;dual FF-NN;equivalent conventional network;maximum entropy;MNIST data set;Neural networks;Feature extraction;Data models;Europe;Signal processing;Entropy;Stochastic processes","doi":"10.23919/EUSIPCO.2019.8902708","issn":"2076-1465","month":"Sep.","url":"https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529856.pdf","bibtex":"@InProceedings{8902708,\n author = {P. M. Baggenstoss},\n booktitle = {2019 27th European Signal Processing Conference (EUSIPCO)},\n title = {Applications of Projected Belief Networks (PBN)},\n year = {2019},\n pages = {1-5},\n abstract = {The projected belief network (PBN) is a layered generative network, with tractable likelihood function (LF) that can be trained by gradient ascent as a probability density function (PDF) estimator and classifier. The PBN is derived from a feed-forward neural network (FF-NN) by finding the generative network that implements the probability distribution with maximum entropy (MaxEnt) consistent with the knowledge of the distribution at the output of the FF-NN. The FF-NN, from which the PBN is derived, is a complementary feature extractor that exactly recovers the PBN's hidden variables. This paper presents a multi-layer PBN and a deterministic PBN that are tested using a subset of MNIST data set. 
When the deterministic PBN is combined with the dual FF-NN, it forms an auto-encoder that achieves much lower reconstruction error on testing data than the equivalent conventional network and functions significantly better as a classifier.},\n keywords = {belief networks;feature extraction;feedforward neural nets;gradient methods;learning (artificial intelligence);maximum entropy methods;maximum likelihood estimation;probability;projected belief networks;layered generative network;tractable likelihood function;probability density function estimator;feed-forward neural network;probability distribution;PBN's hidden variables;multilayer PBN;deterministic PBN;dual FF-NN;equivalent conventional network;maximum entropy;MNIST data set;Neural networks;Feature extraction;Data models;Europe;Signal processing;Entropy;Stochastic processes},\n doi = {10.23919/EUSIPCO.2019.8902708},\n issn = {2076-1465},\n month = {Sep.},\n url = {https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529856.pdf},\n}\n\n","author_short":["Baggenstoss, P. 
M."],"key":"8902708","id":"8902708","bibbaseid":"baggenstoss-applicationsofprojectedbeliefnetworkspbn-2019","role":"author","urls":{"Paper":"https://www.eurasip.org/proceedings/eusipco/eusipco2019/proceedings/papers/1570529856.pdf"},"keyword":["belief networks;feature extraction;feedforward neural nets;gradient methods;learning (artificial intelligence);maximum entropy methods;maximum likelihood estimation;probability;projected belief networks;layered generative network;tractable likelihood function;probability density function estimator;feed-forward neural network;probability distribution;PBN's hidden variables;multilayer PBN;deterministic PBN;dual FF-NN;equivalent conventional network;maximum entropy;MNIST data set;Neural networks;Feature extraction;Data models;Europe;Signal processing;Entropy;Stochastic processes"],"metadata":{"authorlinks":{}},"downloads":0},"bibtype":"inproceedings","biburl":"https://raw.githubusercontent.com/Roznn/EUSIPCO/main/eusipco2019url.bib","creationDate":"2021-02-11T19:15:21.982Z","downloads":0,"keywords":["belief networks;feature extraction;feedforward neural nets;gradient methods;learning (artificial intelligence);maximum entropy methods;maximum likelihood estimation;probability;projected belief networks;layered generative network;tractable likelihood function;probability density function estimator;feed-forward neural network;probability distribution;pbn's hidden variables;multilayer pbn;deterministic pbn;dual ff-nn;equivalent conventional network;maximum entropy;mnist data set;neural networks;feature extraction;data models;europe;signal processing;entropy;stochastic processes"],"search_terms":["applications","projected","belief","networks","pbn","baggenstoss"],"title":"Applications of Projected Belief Networks (PBN)","year":2019,"dataSources":["NqWTiMfRR56v86wRs","r6oz3cMyC99QfiuHW"]}