Long-term Forecasting using Higher Order Tensor RNNs. Yu, R., Zheng, S., Anandkumar, A., & Yue, Y. arXiv:1711.00073 [cs], August, 2019. arXiv: 1711.00073
Paper abstract bibtex We present Higher-Order Tensor RNN (HOT-RNN), a novel family of neural sequence architectures for multivariate forecasting in environments with nonlinear dynamics. Long-term forecasting in such systems is highly challenging, since there exist long-term temporal dependencies, higher-order correlations and sensitivity to error propagation. Our proposed recurrent architecture addresses these issues by learning the nonlinear dynamics directly using higher-order moments and higher-order state transition functions. Furthermore, we decompose the higher-order structure using the tensor-train decomposition to reduce the number of parameters while preserving the model performance. We theoretically establish the approximation guarantees and the variance bound for HOT-RNN for general sequence inputs. We also demonstrate 5% ~ 12% improvements for long-term prediction over general RNN and LSTM architectures on a range of simulated environments with nonlinear dynamics, as well on real-world time series data.
@article{yu_long-term_2019,
title = {Long-term {Forecasting} using {Higher} {Order} {Tensor} {RNNs}},
url = {http://arxiv.org/abs/1711.00073},
abstract = {We present Higher-Order Tensor RNN (HOT-RNN), a novel family of neural sequence architectures for multivariate forecasting in environments with nonlinear dynamics. Long-term forecasting in such systems is highly challenging, since there exist long-term temporal dependencies, higher-order correlations and sensitivity to error propagation. Our proposed recurrent architecture addresses these issues by learning the nonlinear dynamics directly using higher-order moments and higher-order state transition functions. Furthermore, we decompose the higher-order structure using the tensor-train decomposition to reduce the number of parameters while preserving the model performance. We theoretically establish the approximation guarantees and the variance bound for HOT-RNN for general sequence inputs. We also demonstrate 5\% {\textasciitilde} 12\% improvements for long-term prediction over general RNN and LSTM architectures on a range of simulated environments with nonlinear dynamics, as well on real-world time series data.},
urldate = {2019-12-12},
journal = {arXiv:1711.00073 [cs]},
author = {Yu, Rose and Zheng, Stephan and Anandkumar, Anima and Yue, Yisong},
month = aug,
year = {2019},
note = {arXiv: 1711.00073},
keywords = {Computer Science - Machine Learning, forecasting, long-lead forecasting}
}
Downloads: 0
{"_id":"cQZyXRo4jQz9hqt7n","bibbaseid":"yu-zheng-anandkumar-yue-longtermforecastingusinghigherordertensorrnns-2019","authorIDs":[],"author_short":["Yu, R.","Zheng, S.","Anandkumar, A.","Yue, Y."],"bibdata":{"bibtype":"article","type":"article","title":"Long-term Forecasting using Higher Order Tensor RNNs","url":"http://arxiv.org/abs/1711.00073","abstract":"We present Higher-Order Tensor RNN (HOT-RNN), a novel family of neural sequence architectures for multivariate forecasting in environments with nonlinear dynamics. Long-term forecasting in such systems is highly challenging, since there exist long-term temporal dependencies, higher-order correlations and sensitivity to error propagation. Our proposed recurrent architecture addresses these issues by learning the nonlinear dynamics directly using higher-order moments and higher-order state transition functions. Furthermore, we decompose the higher-order structure using the tensor-train decomposition to reduce the number of parameters while preserving the model performance. We theoretically establish the approximation guarantees and the variance bound for HOT-RNN for general sequence inputs. 
We also demonstrate 5% \\textasciitilde 12% improvements for long-term prediction over general RNN and LSTM architectures on a range of simulated environments with nonlinear dynamics, as well on real-world time series data.","urldate":"2019-12-12","journal":"arXiv:1711.00073 [cs]","author":[{"propositions":[],"lastnames":["Yu"],"firstnames":["Rose"],"suffixes":[]},{"propositions":[],"lastnames":["Zheng"],"firstnames":["Stephan"],"suffixes":[]},{"propositions":[],"lastnames":["Anandkumar"],"firstnames":["Anima"],"suffixes":[]},{"propositions":[],"lastnames":["Yue"],"firstnames":["Yisong"],"suffixes":[]}],"month":"August","year":"2019","note":"arXiv: 1711.00073","keywords":"Computer Science - Machine Learning, forecasting, long-lead forescasting","bibtex":"@article{yu_long-term_2019,\n\ttitle = {Long-term {Forecasting} using {Higher} {Order} {Tensor} {RNNs}},\n\turl = {http://arxiv.org/abs/1711.00073},\n\tabstract = {We present Higher-Order Tensor RNN (HOT-RNN), a novel family of neural sequence architectures for multivariate forecasting in environments with nonlinear dynamics. Long-term forecasting in such systems is highly challenging, since there exist long-term temporal dependencies, higher-order correlations and sensitivity to error propagation. Our proposed recurrent architecture addresses these issues by learning the nonlinear dynamics directly using higher-order moments and higher-order state transition functions. Furthermore, we decompose the higher-order structure using the tensor-train decomposition to reduce the number of parameters while preserving the model performance. We theoretically establish the approximation guarantees and the variance bound for HOT-RNN for general sequence inputs. 
We also demonstrate 5\\% {\\textasciitilde} 12\\% improvements for long-term prediction over general RNN and LSTM architectures on a range of simulated environments with nonlinear dynamics, as well on real-world time series data.},\n\turldate = {2019-12-12},\n\tjournal = {arXiv:1711.00073 [cs]},\n\tauthor = {Yu, Rose and Zheng, Stephan and Anandkumar, Anima and Yue, Yisong},\n\tmonth = aug,\n\tyear = {2019},\n\tnote = {arXiv: 1711.00073},\n\tkeywords = {Computer Science - Machine Learning, forecasting, long-lead forescasting}\n}\n\n","author_short":["Yu, R.","Zheng, S.","Anandkumar, A.","Yue, Y."],"key":"yu_long-term_2019","id":"yu_long-term_2019","bibbaseid":"yu-zheng-anandkumar-yue-longtermforecastingusinghigherordertensorrnns-2019","role":"author","urls":{"Paper":"http://arxiv.org/abs/1711.00073"},"keyword":["Computer Science - Machine Learning","forecasting","long-lead forescasting"],"downloads":0},"bibtype":"article","biburl":"https://bibbase.org/zotero/carlosgogo","creationDate":"2019-12-13T10:16:40.596Z","downloads":0,"keywords":["computer science - machine learning","forecasting","long-lead forescasting"],"search_terms":["long","term","forecasting","using","higher","order","tensor","rnns","yu","zheng","anandkumar","yue"],"title":"Long-term Forecasting using Higher Order Tensor RNNs","year":2019,"dataSources":["8KsDsKJXTPZFnaHDn"]}