Neural data-to-text generation with dynamic content planning. Chen, K., Li, F., Hu, B., Peng, W., Chen, Q., Yu, H., & Xiang, Y. Knowledge-Based Systems, November, 2020. Abstract: Neural data-to-text generation models have achieved significant advances in recent years. However, these models have two shortcomings: the generated texts tend to miss some vital information, and they often contain descriptions that are inconsistent with the structured input data. To alleviate these problems, we propose a Neural data-to-text generation model with Dynamic content Planning, named NDP for abbreviation (this work was completed in cooperation with Baidu Inc.). The NDP uses the previously generated text to dynamically select the appropriate entry from the given structured data. We further design a reconstruction mechanism with a novel objective function that reconstructs the whole entry of the used data sequentially from the hidden states of the decoder, which improves the accuracy of the generated text. Empirical results show that the NDP outperforms the state of the art on the ROTOWIRE and NBAZHN datasets in terms of relation generation (RG), content selection (CS), content ordering (CO) and BLEU metrics. Human evaluation shows that the texts generated by the proposed NDP are better than the corresponding ones generated by NCP most of the time. Using the proposed reconstruction mechanism, the fidelity of the generated text can be further improved significantly.
@article{chen_neural_2020,
title = {Neural data-to-text generation with dynamic content planning},
issn = {0950-7051},
url = {http://www.sciencedirect.com/science/article/pii/S0950705120307395},
doi = {10.1016/j.knosys.2020.106610},
	abstract = {Neural data-to-text generation models have achieved significant advances in recent years. However, these models have two shortcomings: the generated texts tend to miss some vital information, and they often contain descriptions that are inconsistent with the structured input data. To alleviate these problems, we propose a Neural data-to-text generation model with Dynamic content Planning, named NDP for abbreviation (this work was completed in cooperation with Baidu Inc.). The NDP uses the previously generated text to dynamically select the appropriate entry from the given structured data. We further design a reconstruction mechanism with a novel objective function that reconstructs the whole entry of the used data sequentially from the hidden states of the decoder, which improves the accuracy of the generated text. Empirical results show that the NDP outperforms the state of the art on the ROTOWIRE and NBAZHN datasets in terms of relation generation (RG), content selection (CS), content ordering (CO) and BLEU metrics. Human evaluation shows that the texts generated by the proposed NDP are better than the corresponding ones generated by NCP most of the time. Using the proposed reconstruction mechanism, the fidelity of the generated text can be further improved significantly.},
language = {en},
urldate = {2020-12-29},
journal = {Knowledge-Based Systems},
author = {Chen, Kai and Li, Fayuan and Hu, Baotian and Peng, Weihua and Chen, Qingcai and Yu, Hong and Xiang, Yang},
month = nov,
year = {2020},
keywords = {Data-to-text, Dynamic content planning, Reconstruction mechanism},
pages = {106610},
}
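To make the entry's abstract more concrete, the following is a minimal, hypothetical PyTorch sketch of the two ideas it describes: (1) dynamic content planning, i.e. re-scoring the structured-data records at every decoding step conditioned on the text generated so far, and (2) an auxiliary reconstruction objective that recovers the used record from the decoder hidden states. This is not the authors' NDP implementation; the module names (DynamicContentPlanner, Reconstructor), tensor shapes, and the cross-entropy formulation of the reconstruction loss are illustrative assumptions only.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class DynamicContentPlanner(nn.Module):
    """Re-scores each structured-data record at every decoding step,
    conditioned on the current decoder state (a stand-in for dynamic
    content planning as sketched in the abstract)."""

    def __init__(self, record_dim: int, hidden_dim: int):
        super().__init__()
        self.score = nn.Sequential(
            nn.Linear(hidden_dim + record_dim, hidden_dim),
            nn.Tanh(),
            nn.Linear(hidden_dim, 1),
        )

    def forward(self, decoder_state, record_embs):
        # decoder_state: (batch, hidden_dim); record_embs: (batch, n_records, record_dim)
        batch, n_records, _ = record_embs.shape
        expanded = decoder_state.unsqueeze(1).expand(batch, n_records, -1)
        scores = self.score(torch.cat([expanded, record_embs], dim=-1)).squeeze(-1)
        weights = F.softmax(scores, dim=-1)                     # per-step selection weights
        context = torch.bmm(weights.unsqueeze(1), record_embs).squeeze(1)
        return context, weights


class Reconstructor(nn.Module):
    """Auxiliary objective: predict, from each decoder hidden state, the index
    of the record verbalised at that step (an illustrative proxy for the
    paper's reconstruction mechanism, not its actual objective)."""

    def __init__(self, hidden_dim: int, n_records: int):
        super().__init__()
        self.classifier = nn.Linear(hidden_dim, n_records)

    def forward(self, decoder_states, record_ids):
        # decoder_states: (batch, steps, hidden_dim); record_ids: (batch, steps)
        logits = self.classifier(decoder_states)
        return F.cross_entropy(logits.view(-1, logits.size(-1)), record_ids.view(-1))


# Toy usage with random tensors (shapes and sizes are arbitrary).
batch, n_records, steps, rec_dim, hid_dim = 2, 5, 7, 16, 32
planner = DynamicContentPlanner(rec_dim, hid_dim)
recon = Reconstructor(hid_dim, n_records)
records = torch.randn(batch, n_records, rec_dim)
h_t = torch.randn(batch, hid_dim)
context, weights = planner(h_t, records)        # dynamic selection at one decoding step
states = torch.randn(batch, steps, hid_dim)
targets = torch.randint(0, n_records, (batch, steps))
aux_loss = recon(states, targets)               # added to the generation loss during training
```

In practice such an auxiliary loss would be weighted and summed with the usual token-level generation loss; the paper reports that its reconstruction mechanism significantly improves the fidelity of the generated text.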
{"_id":"JXC3WmkN7YPGuKh9Z","bibbaseid":"chen-li-hu-peng-chen-yu-xiang-neuraldatatotextgenerationwithdynamiccontentplanning-2020","author_short":["Chen, K.","Li, F.","Hu, B.","Peng, W.","Chen, Q.","Yu, H.","Xiang, Y."],"bibdata":{"bibtype":"article","type":"article","title":"Neural data-to-text generation with dynamic content planning","issn":"0950-7051","url":"http://www.sciencedirect.com/science/article/pii/S0950705120307395","doi":"10.1016/j.knosys.2020.106610","abstract":"Neural data-to-text generation models have achieved significant advancement in recent years. However, these models have two shortcomings: the generated texts tend to miss some vital information, and they often generate descriptions that are not consistent with the structured input data. To alleviate these problems, we propose a Neural data-to-text generation model with Dynamic content Planning, named NDP 2 2This work was completed in cooperation with Baidu Inc.for abbreviation. The NDP can utilize the previously generated text to dynamically select the appropriate entry from the given structured data. We further design a reconstruction mechanism with a novel objective function that can reconstruct the whole entry of the used data sequentially from the hidden states of the decoder, which aids the accuracy of the generated text. Empirical results show that the NDP achieves superior performance over the state-of-the-art on ROTOWIRE and NBAZHN datasets, in terms of relation generation (RG), content selection (CS), content ordering (CO) and BLEU metrics. The human evaluation result shows that the texts generated by the proposed NDP are better than the corresponding ones generated by NCP in most of time. And using the proposed reconstruction mechanism, the fidelity of the generated text can be further improved significantly.","language":"en","urldate":"2020-12-29","journal":"Knowledge-Based Systems","author":[{"propositions":[],"lastnames":["Chen"],"firstnames":["Kai"],"suffixes":[]},{"propositions":[],"lastnames":["Li"],"firstnames":["Fayuan"],"suffixes":[]},{"propositions":[],"lastnames":["Hu"],"firstnames":["Baotian"],"suffixes":[]},{"propositions":[],"lastnames":["Peng"],"firstnames":["Weihua"],"suffixes":[]},{"propositions":[],"lastnames":["Chen"],"firstnames":["Qingcai"],"suffixes":[]},{"propositions":[],"lastnames":["Yu"],"firstnames":["Hong"],"suffixes":[]},{"propositions":[],"lastnames":["Xiang"],"firstnames":["Yang"],"suffixes":[]}],"month":"November","year":"2020","keywords":"Data-to-text, Dynamic content planning, Reconstruction mechanism","pages":"106610","bibtex":"@article{chen_neural_2020,\n\ttitle = {Neural data-to-text generation with dynamic content planning},\n\tissn = {0950-7051},\n\turl = {http://www.sciencedirect.com/science/article/pii/S0950705120307395},\n\tdoi = {10.1016/j.knosys.2020.106610},\n\tabstract = {Neural data-to-text generation models have achieved significant advancement in recent years. However, these models have two shortcomings: the generated texts tend to miss some vital information, and they often generate descriptions that are not consistent with the structured input data. To alleviate these problems, we propose a Neural data-to-text generation model with Dynamic content Planning, named NDP 2 2This work was completed in cooperation with Baidu Inc.for abbreviation. The NDP can utilize the previously generated text to dynamically select the appropriate entry from the given structured data. 
We further design a reconstruction mechanism with a novel objective function that can reconstruct the whole entry of the used data sequentially from the hidden states of the decoder, which aids the accuracy of the generated text. Empirical results show that the NDP achieves superior performance over the state-of-the-art on ROTOWIRE and NBAZHN datasets, in terms of relation generation (RG), content selection (CS), content ordering (CO) and BLEU metrics. The human evaluation result shows that the texts generated by the proposed NDP are better than the corresponding ones generated by NCP in most of time. And using the proposed reconstruction mechanism, the fidelity of the generated text can be further improved significantly.},\n\tlanguage = {en},\n\turldate = {2020-12-29},\n\tjournal = {Knowledge-Based Systems},\n\tauthor = {Chen, Kai and Li, Fayuan and Hu, Baotian and Peng, Weihua and Chen, Qingcai and Yu, Hong and Xiang, Yang},\n\tmonth = nov,\n\tyear = {2020},\n\tkeywords = {Data-to-text, Dynamic content planning, Reconstruction mechanism},\n\tpages = {106610},\n}\n\n","author_short":["Chen, K.","Li, F.","Hu, B.","Peng, W.","Chen, Q.","Yu, H.","Xiang, Y."],"key":"chen_neural_2020","id":"chen_neural_2020","bibbaseid":"chen-li-hu-peng-chen-yu-xiang-neuraldatatotextgenerationwithdynamiccontentplanning-2020","role":"author","urls":{"Paper":"http://www.sciencedirect.com/science/article/pii/S0950705120307395"},"keyword":["Data-to-text","Dynamic content planning","Reconstruction mechanism"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"article","biburl":"http://fenway.cs.uml.edu/papers/pubs-all.bib","dataSources":["TqaA9miSB65nRfS5H"],"keywords":["data-to-text","dynamic content planning","reconstruction mechanism"],"search_terms":["neural","data","text","generation","dynamic","content","planning","chen","li","hu","peng","chen","yu","xiang"],"title":"Neural data-to-text generation with dynamic content planning","year":2020}