Probabilistic Latent Semantic Analysis. Hofmann, T. Paper Website abstract bibtex Probabilistic Latent Semantic Analysis is a novel statistical technique for the analysis of two-mode and co-occurrence data, which has applications in information retrieval and filtering, natural language processing, machine learning from text, and in related areas. Compared to standard Latent Semantic Analysis which stems from linear algebra and performs a Singular Value Decomposition of co-occurrence tables, the proposed method is based on a mixture decomposition derived from a latent class model. This results in a more principled approach which has a solid foundation in statistics. In order to avoid overfitting, we propose a widely applicable generalization of maximum likelihood model fitting by tempered EM. Our approach yields substantial and consistent improvements over Latent Semantic Analysis in a number of experiments.
@inproceedings{hofmann1999plsa,
  author = {Hofmann, Thomas},
  title = {Probabilistic Latent Semantic Analysis},
  booktitle = {Proceedings of the Fifteenth Conference on Uncertainty in Artificial Intelligence ({UAI} 1999)},
  year = {1999},
  pages = {289--296},
  publisher = {Morgan Kaufmann},
  url = {http://www.iro.umontreal.ca/~nie/IFT6255/Hofmann-UAI99.pdf},
  urldate = {2018-02-05},
  abstract = {Probabilistic Latent Semantic Analysis is a novel statistical technique for the analysis of two-mode and co-occurrence data, which has applications in information retrieval and filtering, natural language processing, machine learning from text, and in related areas. Compared to standard Latent Semantic Analysis which stems from linear algebra and performs a Singular Value Decomposition of co-occurrence tables, the proposed method is based on a mixture decomposition derived from a latent class model. This results in a more principled approach which has a solid foundation in statistics. In order to avoid overfitting, we propose a widely applicable generalization of maximum likelihood model fitting by tempered EM. Our approach yields substantial and consistent improvements over Latent Semantic Analysis in a number of experiments.},
  type = {inproceedings},
  websites = {http://www.iro.umontreal.ca/~nie/IFT6255/Hofmann-UAI99.pdf},
  id = {f26ef594-349c-3ec3-888e-dafbf9e3cad1},
  created = {2018-02-05T16:56:49.934Z},
  accessed = {2018-02-05},
  file_attached = {true},
  profile_id = {371589bb-c770-37ff-8193-93c6f25ffeb1},
  group_id = {f982cd63-7ceb-3aa2-ac7e-a953963d6716},
  last_modified = {2018-02-05T16:56:51.867Z},
  read = {false},
  starred = {false},
  authored = {false},
  confirmed = {false},
  hidden = {false},
  private_publication = {false},
  bibtype = {inproceedings}
}
Downloads: 0
{"_id":"E2Swy36tssGSRrccb","bibbaseid":"hofmann-probabilisticlatentsemanticanalysis","downloads":0,"creationDate":"2018-02-07T16:22:57.278Z","title":"Probabilistic Latent Semantic Analysis","author_short":["Hofmann, T."],"year":null,"bibtype":"article","biburl":null,"bibdata":{"title":"Probabilistic Latent Semantic Analysis","type":"article","websites":"http://www.iro.umontreal.ca/~nie/IFT6255/Hofmann-UAI99.pdf","id":"f26ef594-349c-3ec3-888e-dafbf9e3cad1","created":"2018-02-05T16:56:49.934Z","accessed":"2018-02-05","file_attached":"true","profile_id":"371589bb-c770-37ff-8193-93c6f25ffeb1","group_id":"f982cd63-7ceb-3aa2-ac7e-a953963d6716","last_modified":"2018-02-05T16:56:51.867Z","read":false,"starred":false,"authored":false,"confirmed":false,"hidden":false,"private_publication":false,"abstract":"Probabilistic Latent Semantic Analysis is a novel statistical technique for the analysis of twoomode and co-occurrence data, which has applications in information retrieval and [ltering, natural language processing, ma-chine learning from text, and in related ar-eas. Compared to standard Latent Semantic Analysis which stems from linear algebra and performs a Singular Value Decomposition of co-occurrence tables, the proposed method is based on a mixture decomposition derived from a latent class model. This results in a more principled approach which has a solid foundation in statistics. In order to avoid overrtting, we propose a widely applicable generalization of maximum likelihood model by tempered EM. 
Our approach yields substantial and consistent improvements over Latent Semantic Analysis in a number of ex-periments.","bibtype":"article","author":"Hofmann, Thomas","bibtex":"@article{\n title = {Probabilistic Latent Semantic Analysis},\n type = {article},\n websites = {http://www.iro.umontreal.ca/~nie/IFT6255/Hofmann-UAI99.pdf},\n id = {f26ef594-349c-3ec3-888e-dafbf9e3cad1},\n created = {2018-02-05T16:56:49.934Z},\n accessed = {2018-02-05},\n file_attached = {true},\n profile_id = {371589bb-c770-37ff-8193-93c6f25ffeb1},\n group_id = {f982cd63-7ceb-3aa2-ac7e-a953963d6716},\n last_modified = {2018-02-05T16:56:51.867Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {false},\n hidden = {false},\n private_publication = {false},\n abstract = {Probabilistic Latent Semantic Analysis is a novel statistical technique for the analysis of twoomode and co-occurrence data, which has applications in information retrieval and [ltering, natural language processing, ma-chine learning from text, and in related ar-eas. Compared to standard Latent Semantic Analysis which stems from linear algebra and performs a Singular Value Decomposition of co-occurrence tables, the proposed method is based on a mixture decomposition derived from a latent class model. This results in a more principled approach which has a solid foundation in statistics. In order to avoid overrtting, we propose a widely applicable generalization of maximum likelihood model by tempered EM. 
Our approach yields substantial and consistent improvements over Latent Semantic Analysis in a number of ex-periments.},\n bibtype = {article},\n author = {Hofmann, Thomas}\n}","author_short":["Hofmann, T."],"urls":{"Paper":"http://bibbase.org/service/mendeley/371589bb-c770-37ff-8193-93c6f25ffeb1/file/572ec6a4-3f28-a18e-2d80-dd4fdcedc077/Probabilistic_Latent_Semantic_Analysis.pdf.pdf","Website":"http://www.iro.umontreal.ca/~nie/IFT6255/Hofmann-UAI99.pdf"},"bibbaseid":"hofmann-probabilisticlatentsemanticanalysis","role":"author","downloads":0},"search_terms":["probabilistic","latent","semantic","analysis","hofmann"],"keywords":[],"authorIDs":[]}