Neural Sequence Modeling in Physical Language Understanding. Bleiweiss, A. In Proceedings of the 11th International Joint Conference on Computational Intelligence, pages 464–472, Vienna, Austria, 2019. SCITEPRESS - Science and Technology Publications.
Abstract: Automating the tasks of generating test questions and analyzing content for assessment of written student responses has been one of the more sought-after applications to support classroom educators. However, a major impediment to algorithm advances in developing such tools is the lack of large and publicly available domain corpora. In this paper, we explore deep learning of physics word problems performed at scale using the transformer, a state-of-the-art self-attention neural architecture. Our study proposes an intuitive novel approach to a tree-based data generation that relies mainly on physical knowledge structure and defers compositionality of natural language clauses to the terminal nodes. Applying our method to the simpler kinematics domain that describes motion properties of an object at a uniform acceleration rate and using our neural sequence model pretrained on a dataset of ten thousand machine-produced problems, we achieved BLEU scores of 0.54 and 0.81 for predicting derivation expressions on real-world and synthetic test sets, respectively. Notably, increasing the number of trained problems resulted in a diminishing return on performance.
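A minimal sketch of the data-generation idea described in the abstract, not the paper's actual code: internal nodes of the generation tree hold the uniform-acceleration kinematics equations (the physical knowledge structure), and natural-language clauses are deferred to the terminal nodes. sympy, which the keyword tag below suggests the paper mentions, produces the target derivation expression; all names here (EQUATIONS, CLAUSES, make_problem) are hypothetical illustrations.

import random
import sympy as sp

# Kinematics symbols: displacement s, initial velocity u, final velocity v,
# acceleration a, elapsed time t.
s, u, v, a, t = sp.symbols("s u v a t")

# Internal nodes: uniform-acceleration equations (physical knowledge structure).
EQUATIONS = {
    "v = u + a*t": sp.Eq(v, u + a * t),
    "s = u*t + a*t**2/2": sp.Eq(s, u * t + sp.Rational(1, 2) * a * t**2),
}

# Terminal nodes: clause templates, so natural-language compositionality
# lives only at the leaves of the tree (hypothetical phrasings).
CLAUSES = {
    u: "starts at {val} m/s",
    a: "accelerates uniformly at {val} m/s^2",
    t: "travels for {val} s",
}

def make_problem(rng):
    """Sample given quantities, render the word problem, derive the unknown."""
    givens = {u: rng.randint(0, 20), a: rng.randint(1, 5), t: rng.randint(1, 10)}
    eq = EQUATIONS["v = u + a*t"]
    derivation = sp.solve(eq.subs(givens), v)[0]  # solve for final velocity v
    text = ("An object " +
            ", ".join(CLAUSES[sym].format(val=val) for sym, val in givens.items()) +
            ". What is its final velocity?")
    return text, sp.Eq(v, derivation)

problem, target = make_problem(random.Random(0))
print(problem)  # source sequence for the neural sequence model
print(target)   # target derivation expression, e.g. Eq(v, 38)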
@inproceedings{bleiweiss_neural_2019,
address = {Vienna, Austria},
title = {Neural {Sequence} {Modeling} in {Physical} {Language} {Understanding}},
isbn = {978-989-758-384-1},
url = {http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0008071104640472},
doi = {10.5220/0008071104640472},
abstract = {Automating the tasks of generating test questions and analyzing content for assessment of written student responses has been one of the more sought-after applications to support classroom educators. However, a major impediment to algorithm advances in developing such tools is the lack of large and publicly available domain corpora. In this paper, we explore deep learning of physics word problems performed at scale using the transformer, a state-of-the-art self-attention neural architecture. Our study proposes an intuitive novel approach to a tree-based data generation that relies mainly on physical knowledge structure and defers compositionality of natural language clauses to the terminal nodes. Applying our method to the simpler kinematics domain that describes motion properties of an object at a uniform acceleration rate and using our neural sequence model pretrained on a dataset of ten thousand machine-produced problems, we achieved BLEU scores of 0.54 and 0.81 for predicting derivation expressions on real-world and synthetic test sets, respectively. Notably, increasing the number of trained problems resulted in a diminishing return on performance.},
urldate = {2019-11-19},
booktitle = {Proceedings of the 11th {International} {Joint} {Conference} on {Computational} {Intelligence}},
publisher = {SCITEPRESS - Science and Technology Publications},
author = {Bleiweiss, Avi},
year = {2019},
keywords = {Recurrent neural networks, mentions sympy},
pages = {464--472},
}
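The abstract reports BLEU scores of 0.54 and 0.81 for predicted derivation expressions. A minimal sketch of how such token-level BLEU scoring can be computed, assuming NLTK is used for evaluation (the token sequences are invented examples, not data from the paper):

from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction

reference = ["v", "=", "u", "+", "a", "*", "t"]   # gold derivation tokens
hypothesis = ["v", "=", "u", "+", "a", "*", "t"]  # model prediction tokens

# Smoothing prevents zero scores on short sequences lacking higher-order n-grams.
score = sentence_bleu([reference], hypothesis,
                      smoothing_function=SmoothingFunction().method1)
print(f"BLEU: {score:.2f}")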
{"_id":"HoHKaQ3piH67SFu5G","bibbaseid":"bleiweiss-neuralsequencemodelinginphysicallanguageunderstanding-2019","authorIDs":[],"author_short":["Bleiweiss, A."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"Vienna, Austria","title":"Neural Sequence Modeling in Physical Language Understanding:","isbn":"978-989-758-384-1","shorttitle":"Neural Sequence Modeling in Physical Language Understanding","url":"http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0008071104640472","doi":"10.5220/0008071104640472","abstract":"Automating the tasks of generating test questions and analyzing content for assessment of written student re-sponses has been one of the more sought-after applications to support classroom educators. However, a majorimpediment to algorithm advances in developing such tools is the lack of large and publicly available domaincorpora. In this paper, we explore deep learning of physics word problems performed at scale using the trans-former, a state-of-the-art self-attention neural architecture. Our study proposes an intuitive novel approach toa tree-based data generation that relies mainly on physical knowledge structure and defers compositionalityof natural language clauses to the terminal nodes. Applying our method to the simpler kinematics domainthat describes motion properties of an object at a uniform acceleration rate and using our neural sequencemodel pretrained on a dataset of ten thousand machine-produced problems, we achieved BLEU scores of 0.54and 0.81 for predicting derivation expressions on real-world and synthetic test sets, respectively. Notablyincreasing the number of trained problems resulted in a diminishing return on performance.","urldate":"2019-11-19","booktitle":"Proceedings of the 11th International Joint Conference on Computational Intelligence","publisher":"SCITEPRESS - Science and Technology Publications","author":[{"propositions":[],"lastnames":["Bleiweiss"],"firstnames":["Avi"],"suffixes":[]}],"year":"2019","keywords":"Recurrent neural networks, mentions sympy","pages":"464–472","bibtex":"@inproceedings{bleiweiss_neural_2019,\n\taddress = {Vienna, Austria},\n\ttitle = {Neural {Sequence} {Modeling} in {Physical} {Language} {Understanding}:},\n\tisbn = {978-989-758-384-1},\n\tshorttitle = {Neural {Sequence} {Modeling} in {Physical} {Language} {Understanding}},\n\turl = {http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0008071104640472},\n\tdoi = {10.5220/0008071104640472},\n\tabstract = {Automating the tasks of generating test questions and analyzing content for assessment of written student re-sponses has been one of the more sought-after applications to support classroom educators. However, a majorimpediment to algorithm advances in developing such tools is the lack of large and publicly available domaincorpora. In this paper, we explore deep learning of physics word problems performed at scale using the trans-former, a state-of-the-art self-attention neural architecture. Our study proposes an intuitive novel approach toa tree-based data generation that relies mainly on physical knowledge structure and defers compositionalityof natural language clauses to the terminal nodes. 
Applying our method to the simpler kinematics domainthat describes motion properties of an object at a uniform acceleration rate and using our neural sequencemodel pretrained on a dataset of ten thousand machine-produced problems, we achieved BLEU scores of 0.54and 0.81 for predicting derivation expressions on real-world and synthetic test sets, respectively. Notablyincreasing the number of trained problems resulted in a diminishing return on performance.},\n\turldate = {2019-11-19},\n\tbooktitle = {Proceedings of the 11th {International} {Joint} {Conference} on {Computational} {Intelligence}},\n\tpublisher = {SCITEPRESS - Science and Technology Publications},\n\tauthor = {Bleiweiss, Avi},\n\tyear = {2019},\n\tkeywords = {Recurrent neural networks, mentions sympy},\n\tpages = {464--472},\n}\n\n\n\n","author_short":["Bleiweiss, A."],"key":"bleiweiss_neural_2019","id":"bleiweiss_neural_2019","bibbaseid":"bleiweiss-neuralsequencemodelinginphysicallanguageunderstanding-2019","role":"author","urls":{"Paper":"http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0008071104640472"},"keyword":["Recurrent neural networks","mentions sympy"],"metadata":{"authorlinks":{}},"downloads":0},"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero-group/nicoguaro/525293","creationDate":"2019-12-03T22:21:29.774Z","downloads":0,"keywords":["recurrent neural networks","mentions sympy"],"search_terms":["neural","sequence","modeling","physical","language","understanding","bleiweiss"],"title":"Neural Sequence Modeling in Physical Language Understanding:","year":2019,"dataSources":["YtBDXPDiQEyhyEDZC","fhHfrQgj3AaGp7e9E","qzbMjEJf5d9Lk78vE","45tA9RFoXA9XeH4MM","MeSgs2KDKZo3bEbxH","nSXCrcahhCNfzvXEY","ecatNAsyr4f2iQyGq","tpWeaaCgFjPTYCjg3"]}