BBN PLUM System as Used for MUC-4. Ayuso, D., Boisen, S., Fox, H. J., Gish, H., Ingria, B., & Weischedel, R. In Proceedings of the Fourth Message Understanding Conference MUC4, 1992. Website abstract bibtex Traditional approaches to the problem of extracting data from texts have emphasized hand-crafted linguistic knowledge. In contrast, BBN's PLUM system (Probabilistic Language Understanding Model) was developed as part of a DARPA-funded research effort on integrating probabilistic language models with more traditional linguistic techniques. Our research and development goals are • more rapid development of new applications, • the ability to train (and re-train) systems based on user markings of correct and incorrect output, • more accurate selection among interpretations when more than one is found, and • more robust partial interpretation when no complete interpretation can be found. A central assumption of our approach is that in processing unrestricted text for data extraction, a non-trivial amount of the text will not be understood. As a result, all components of PLUM are designed to operate on partially understood input, taking advantage of information when available, and not failing when information is unavailable. We had previously performed experiments on components of the system with texts from the Wall Street Journal; however, the MUC-3 task was the first end-to-end application of PLUM. Very little hand-tuning of knowledge bases was done for MUC-4; since MUC-3, the system architecture as depicted in figure 1 has remained essentially the same. In addition to participating in MUC-4, since MUC-3 we focused on porting to new domains and a new language, and on performing various experiments designed to control recall/precision tradeoffs.
To support these goals, the preprocessing component and the fragment combiner were made declarative; the semantics component was generalized to use probabilities on word senses; we expanded our treatment of reference; we enlarged the set of system parameters at all levels; and we created a new probabilistic classifier for text relevance which filters discourse events.
@inproceedings{Ayuso1992,
  title               = {{BBN} {PLUM} System as Used for {MUC-4}},
  type                = {inProceedings},
  year                = {1992},
  url                 = {http://aclweb.org/anthology-new/M/M92/M92-1024.pdf},
  websites            = {http://aclweb.org/anthology-new/M/M92/M92-1024.pdf},
  id                  = {46f82449-b392-3064-9a76-fa459d7f7109},
  created             = {2012-02-28T00:51:15.000Z},
  file_attached       = {false},
  profile_id          = {5284e6aa-156c-3ce5-bc0e-b80cf09f3ef6},
  group_id            = {066b42c8-f712-3fc3-abb2-225c158d2704},
  last_modified       = {2017-03-14T14:36:19.698Z},
  read                = {false},
  starred             = {false},
  authored            = {false},
  confirmed           = {true},
  hidden              = {false},
  citation_key        = {Ayuso1992},
  private_publication = {false},
  abstract            = {Traditional approaches to the problem of extracting data from texts have emphasized hand-crafted linguistic knowledge. In contrast, BBN's PLUM system (Probabilistic Language Understanding Model) was developed as part of a DARPA-funded research effort on integrating probabilistic language models with more traditional linguistic techniques. Our research and development goals are (1) more rapid development of new applications, (2) the ability to train (and re-train) systems based on user markings of correct and incorrect output, (3) more accurate selection among interpretations when more than one is found, and (4) more robust partial interpretation when no complete interpretation can be found. A central assumption of our approach is that in processing unrestricted text for data extraction, a non-trivial amount of the text will not be understood. As a result, all components of PLUM are designed to operate on partially understood input, taking advantage of information when available, and not failing when information is unavailable. We had previously performed experiments on components of the system with texts from the Wall Street Journal; however, the MUC-3 task was the first end-to-end application of PLUM. Very little hand-tuning of knowledge bases was done for MUC-4; since MUC-3, the system architecture as depicted in figure 1 has remained essentially the same. In addition to participating in MUC-4, since MUC-3 we focused on porting to new domains and a new language, and on performing various experiments designed to control recall/precision tradeoffs. To support these goals, the preprocessing component and the fragment combiner were made declarative; the semantics component was generalized to use probabilities on word senses; we expanded our treatment of reference; we enlarged the set of system parameters at all levels; and we created a new probabilistic classifier for text relevance which filters discourse events.},
  bibtype             = {inProceedings},
  author              = {Ayuso, Damaris and Boisen, Sean and Fox, Heidi J. and Gish, Herb and Ingria, Bob and Weischedel, Ralph},
  booktitle           = {Proceedings of the Fourth Message Understanding Conference {MUC4}},
}
Downloads: 0
{"_id":"nC3APK6tgRzpDrqKb","bibbaseid":"ayuso-boisen-fox-gish-ingria-weischedel-bbnplumsystemasusedformuc4-1992","authorIDs":[],"author_short":["Ayuso, D.","Boisen, S.","Fox, H., J.","Gish, H.","Ingria, B.","Weischedel, R."],"bibdata":{"title":"BBN PLUM System as Used for MUC-4","type":"inProceedings","year":"1992","websites":"http://aclweb.org/anthology-new/M/M92/M92-1024.pdf","id":"46f82449-b392-3064-9a76-fa459d7f7109","created":"2012-02-28T00:51:15.000Z","file_attached":false,"profile_id":"5284e6aa-156c-3ce5-bc0e-b80cf09f3ef6","group_id":"066b42c8-f712-3fc3-abb2-225c158d2704","last_modified":"2017-03-14T14:36:19.698Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"citation_key":"Ayuso1992","private_publication":false,"abstract":"Traditional approaches to the problem of extracting data from texts have emphasized hand-crafted linguisti c knowledge . In contrast, BBN's PLUM system (Probabilistic Language Understanding Model) was developed a s part of a DARPA-funded research effort on integrating probabilistic language models with more traditional linguistic techniques . Our research and development goals are • more rapid development of new applications, • the ability to train (and re-train) systems based on user markings of correct and incorrect output, • more accurate selection among interpretations when more than one is found, an d • more robust partial interpretation when no complete interpretation can be found. A central assumption of our approach is that in processing unrestricted text for data extraction, a non-trivia l amount of the text will not be understood. As a result, all components of PLUM are designed to operate on partially understood input, taking advantage of information when available, and not failing when information is unavailable . We had previously performed experiments on components of the system with texts from the Wall Stree t Journal, however, the MUC-3 task was the first end-to-end application of PLUM. 
Very little hand-tuning of knowledge bases was done for MUC-4 ; since MUC-3, the system architecture as depicted in figure 1 has remained essentially the same. In addition to participating in MUC-4, since MUC-3 we focused on porting to new domains and a new language, and on performing various experiments designed to control recall/precision tradeoffs . To support these goals, the preprocessing component and the fragment combiner were made declarative; the semantics component was generalized to use probabilities on word senses ; we expanded our treatment of reference ; we enlarged the set of system parameters at all levels ; and we created a new probabilistic classifier for text relevance which filter s discourse events.","bibtype":"inProceedings","author":"Ayuso, Damaris and Boisen, Sean and Fox, Heidi J and Gish, Herb and Ingria, Bob and Weischedel, Ralph","booktitle":"Proceedings of the Fourth Message Understanding Conference MUC4","bibtex":"@inProceedings{\n title = {BBN PLUM System as Used for MUC-4},\n type = {inProceedings},\n year = {1992},\n websites = {http://aclweb.org/anthology-new/M/M92/M92-1024.pdf},\n id = {46f82449-b392-3064-9a76-fa459d7f7109},\n created = {2012-02-28T00:51:15.000Z},\n file_attached = {false},\n profile_id = {5284e6aa-156c-3ce5-bc0e-b80cf09f3ef6},\n group_id = {066b42c8-f712-3fc3-abb2-225c158d2704},\n last_modified = {2017-03-14T14:36:19.698Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n citation_key = {Ayuso1992},\n private_publication = {false},\n abstract = {Traditional approaches to the problem of extracting data from texts have emphasized hand-crafted linguisti c knowledge . In contrast, BBN's PLUM system (Probabilistic Language Understanding Model) was developed a s part of a DARPA-funded research effort on integrating probabilistic language models with more traditional linguistic techniques . 
Our research and development goals are • more rapid development of new applications, • the ability to train (and re-train) systems based on user markings of correct and incorrect output, • more accurate selection among interpretations when more than one is found, an d • more robust partial interpretation when no complete interpretation can be found. A central assumption of our approach is that in processing unrestricted text for data extraction, a non-trivia l amount of the text will not be understood. As a result, all components of PLUM are designed to operate on partially understood input, taking advantage of information when available, and not failing when information is unavailable . We had previously performed experiments on components of the system with texts from the Wall Stree t Journal, however, the MUC-3 task was the first end-to-end application of PLUM. Very little hand-tuning of knowledge bases was done for MUC-4 ; since MUC-3, the system architecture as depicted in figure 1 has remained essentially the same. In addition to participating in MUC-4, since MUC-3 we focused on porting to new domains and a new language, and on performing various experiments designed to control recall/precision tradeoffs . 
To support these goals, the preprocessing component and the fragment combiner were made declarative; the semantics component was generalized to use probabilities on word senses ; we expanded our treatment of reference ; we enlarged the set of system parameters at all levels ; and we created a new probabilistic classifier for text relevance which filter s discourse events.},\n bibtype = {inProceedings},\n author = {Ayuso, Damaris and Boisen, Sean and Fox, Heidi J and Gish, Herb and Ingria, Bob and Weischedel, Ralph},\n booktitle = {Proceedings of the Fourth Message Understanding Conference MUC4}\n}","author_short":["Ayuso, D.","Boisen, S.","Fox, H., J.","Gish, H.","Ingria, B.","Weischedel, R."],"urls":{"Website":"http://aclweb.org/anthology-new/M/M92/M92-1024.pdf"},"bibbaseid":"ayuso-boisen-fox-gish-ingria-weischedel-bbnplumsystemasusedformuc4-1992","role":"author","downloads":0,"html":""},"bibtype":"inProceedings","creationDate":"2020-02-06T23:48:11.986Z","downloads":0,"keywords":[],"search_terms":["bbn","plum","system","used","muc","ayuso","boisen","fox","gish","ingria","weischedel"],"title":"BBN PLUM System as Used for MUC-4","year":1992}