Recurrent Neural Networks as Weighted Language Recognizers. Chen, Y., Gilroy, S., Maletti, A., May, J., & Knight, K. In Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers), pages 2261–2271, New Orleans, Louisiana, June 2018. Association for Computational Linguistics. Paper: http://www.aclweb.org/anthology/N18-1205

Abstract: We investigate the computational complexity of various problems for simple recurrent neural networks (RNNs) as formal models for recognizing weighted languages. We focus on the single-layer, ReLU-activation, rational-weight RNNs with softmax, which are commonly used in natural language processing applications. We show that most problems for such RNNs are undecidable, including consistency, equivalence, minimization, and the determination of the highest-weighted string. However, for consistent RNNs the last problem becomes decidable, although the solution length can surpass all computable bounds. If additionally the string is limited to polynomial length, the problem becomes NP-complete. In summary, this shows that approximations and heuristic algorithms are necessary in practical applications of those RNNs.
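To make the model class in the abstract concrete, here is a minimal sketch (not code from the paper; the vocabulary, dimensions, and random parameters are illustrative assumptions, and floats stand in for the rational weights) of such a recognizer: a single-layer RNN with ReLU activation and a softmax output, where the weight of a string is the product of the per-symbol softmax probabilities, including that of an end-of-string symbol.

    # Sketch of a single-layer ReLU RNN with softmax used as a
    # weighted language recognizer. All names and sizes are
    # illustrative assumptions, not the paper's construction.
    import numpy as np

    rng = np.random.default_rng(0)
    VOCAB = ["a", "b", "</s>"]      # "</s>" marks end of string
    V, H = len(VOCAB), 4            # vocabulary size, hidden size

    # Parameters of the recognizer (floats stand in for rationals).
    E = rng.normal(size=(V, H))     # input embeddings
    W = rng.normal(size=(H, H))     # recurrent weights
    U = rng.normal(size=(V, H))     # output projection
    b = rng.normal(size=H)          # hidden bias

    def softmax(z):
        z = z - z.max()             # shift for numerical stability
        e = np.exp(z)
        return e / e.sum()

    def string_weight(s):
        """Weight of string s: the product of softmax probabilities
        of each symbol, and of </s>, given the hidden state so far."""
        h = np.zeros(H)
        weight = 1.0
        for sym in s:
            p = softmax(U @ h)      # distribution over the next symbol
            weight *= p[VOCAB.index(sym)]
            h = np.maximum(0.0, E[VOCAB.index(sym)] + W @ h + b)  # ReLU step
        weight *= softmax(U @ h)[VOCAB.index("</s>")]  # probability of stopping
        return weight

    print(string_weight("ab"))

In these terms, the consistency problem asked in the abstract is whether string_weight sums to 1 over all strings, and the highest-weighted-string problem asks for its argmax; the paper shows both are undecidable in general, with the latter becoming decidable for consistent RNNs and NP-complete under a polynomial length bound.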
@InProceedings{chen-EtAl:2018:N18-14,
author = {Chen, Yining and Gilroy, Sorcha and Maletti, Andreas and May, Jonathan and Knight, Kevin},
title = {Recurrent Neural Networks as Weighted Language Recognizers},
booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers)},
month = {June},
year = {2018},
address = {New Orleans, Louisiana},
publisher = {Association for Computational Linguistics},
pages = {2261--2271},
abstract = {We investigate the computational complexity of various problems for simple recurrent neural networks (RNNs) as formal models for recognizing weighted languages. We focus on the single-layer, ReLU-activation, rational-weight RNNs with softmax, which are commonly used in natural language processing applications. We show that most problems for such RNNs are undecidable, including consistency, equivalence, minimization, and the determination of the highest-weighted string. However, for consistent RNNs the last problem becomes decidable, although the solution length can surpass all computable bounds. If additionally the string is limited to polynomial length, the problem becomes NP-complete. In summary, this shows that approximations and heuristic algorithms are necessary in practical applications of those RNNs.},
url = {http://www.aclweb.org/anthology/N18-1205}
}
{"_id":"xHj6AtKZaZNS9gnpN","bibbaseid":"chen-gilroy-maletti-may-knight-recurrentneuralnetworksasweightedlanguagerecognizers-2018","author_short":["Chen, Y.","Gilroy, S.","Maletti, A.","May, J.","Knight, K."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","author":[{"propositions":[],"lastnames":["Chen"],"firstnames":["Yining"],"suffixes":[]},{"propositions":[],"lastnames":["Gilroy"],"firstnames":["Sorcha"],"suffixes":[]},{"propositions":[],"lastnames":["Maletti"],"firstnames":["Andreas"],"suffixes":[]},{"propositions":[],"lastnames":["May"],"firstnames":["Jonathan"],"suffixes":[]},{"propositions":[],"lastnames":["Knight"],"firstnames":["Kevin"],"suffixes":[]}],"title":"Recurrent Neural Networks as Weighted Language Recognizers","booktitle":"Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers)","month":"June","year":"2018","address":"New Orleans, Louisiana","publisher":"Association for Computational Linguistics","pages":"2261–2271","abstract":"We investigate the computational complexity of various problems for simple recurrent neural networks (RNNs) as formal models for recognizing weighted languages. We focus on the single-layer, ReLU-activation, rational-weight RNNs with softmax, which are commonly used in natural language processing applications. We show that most problems for such RNNs are undecidable, including consistency, equivalence, minimization, and the determination of the highest-weighted string. However, for consistent RNNs the last problem becomes decidable, although the solution length can surpass all computable bounds. If additionally the string is limited to polynomial length, the problem becomes NP-complete. In summary, this shows that approximations and heuristic algorithms are necessary in practical applications of those RNNs.","url":"http://www.aclweb.org/anthology/N18-1205","bibtex":"@InProceedings{chen-EtAl:2018:N18-14,\n author = {Chen, Yining and Gilroy, Sorcha and Maletti, Andreas and May, Jonathan and Knight, Kevin},\n title = {Recurrent Neural Networks as Weighted Language Recognizers},\n booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Volume 1 (Long Papers)},\n month = {June},\n year = {2018},\n address = {New Orleans, Louisiana},\n publisher = {Association for Computational Linguistics},\n pages = {2261--2271},\n abstract = {We investigate the computational complexity of various problems for simple recurrent neural networks (RNNs) as formal models for recognizing weighted languages. We focus on the single-layer, ReLU-activation, rational-weight RNNs with softmax, which are commonly used in natural language processing applications. We show that most problems for such RNNs are undecidable, including consistency, equivalence, minimization, and the determination of the highest-weighted string. However, for consistent RNNs the last problem becomes decidable, although the solution length can surpass all computable bounds. If additionally the string is limited to polynomial length, the problem becomes NP-complete. 
In summary, this shows that approximations and heuristic algorithms are necessary in practical applications of those RNNs.},\n url = {http://www.aclweb.org/anthology/N18-1205}\n}\n\n","author_short":["Chen, Y.","Gilroy, S.","Maletti, A.","May, J.","Knight, K."],"key":"chen-EtAl:2018:N18-14","id":"chen-EtAl:2018:N18-14","bibbaseid":"chen-gilroy-maletti-may-knight-recurrentneuralnetworksasweightedlanguagerecognizers-2018","role":"author","urls":{"Paper":"http://www.aclweb.org/anthology/N18-1205"},"metadata":{"authorlinks":{}}},"bibtype":"inproceedings","biburl":"https://jonmay.github.io/webpage/cutelabname/cutelabname.bib","dataSources":["ZdhKtP2cSp3Aki2ge","X5WBAKQabka5TW5z7","hbZSwot2msWk92m5B","fcWjcoAgajPvXWcp7","GvHfaAWP6AfN6oLQE","j3Qzx9HAAC6WtJDHS","5eM3sAccSEpjSDHHQ"],"keywords":[],"search_terms":["recurrent","neural","networks","weighted","language","recognizers","chen","gilroy","maletti","may","knight"],"title":"Recurrent Neural Networks as Weighted Language Recognizers","year":2018}