Extraction of Historical Events from Wikipedia. Hienert, D. arXiv preprint arXiv:1205.4138, 2012.
The DBpedia project extracts structured information from Wikipedia and makes it available on the web. Information is gathered mainly from infoboxes, which contain structured information about the Wikipedia article. A lot of information is contained only in the article body and is not yet included in DBpedia. In this paper we focus on the extraction of historical events from Wikipedia articles, which are available for about 2,500 years and in different languages. We have extracted about 121,000 events with more than 325,000 links to DBpedia entities and provide access to this data via a Web API, a SPARQL endpoint, a Linked Data Interface, and a timeline application.
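
The abstract notes that the extracted events can be queried through a Web API and a SPARQL endpoint. As a rough illustration of the SPARQL route, the Python sketch below retrieves events for a single year; the endpoint URL and the vocabulary used (the LODE event ontology and Dublin Core terms) are assumptions made for illustration and are not taken from the paper, so they would need to be replaced with the dataset's actual endpoint and schema.

# Illustrative sketch only: the endpoint URL and the event vocabulary
# (lode:Event, lode:atTime, dcterms:description) are assumptions, not
# details confirmed by the paper.
import requests

ENDPOINT = "http://example.org/historical-events/sparql"  # hypothetical endpoint

QUERY = """
PREFIX lode: <http://linkedevents.org/ontology/>
PREFIX dcterms: <http://purl.org/dc/terms/>
SELECT ?event ?date ?description WHERE {
  ?event a lode:Event ;
         lode:atTime ?date ;
         dcterms:description ?description .
  FILTER (STRSTARTS(STR(?date), "1969"))
}
LIMIT 10
"""

# Standard SPARQL protocol: pass the query as a GET parameter and ask for
# JSON results via content negotiation.
response = requests.get(
    ENDPOINT,
    params={"query": QUERY},
    headers={"Accept": "application/sparql-results+json"},
)
response.raise_for_status()
for binding in response.json()["results"]["bindings"]:
    print(binding["date"]["value"], binding["description"]["value"])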
@article{Hienert2012,
 title = {Extraction of Historical Events from Wikipedia},
 author = {Hienert, Daniel},
 journal = {arXiv preprint arXiv:1205.4138},
 year = {2012},
 url = {http://arxiv.org/abs/1205.4138},
 keywords = {event extraction},
 abstract = {The DBpedia project extracts structured information from Wikipedia and makes it available on the web. Information is gathered mainly from infoboxes, which contain structured information about the Wikipedia article. A lot of information is contained only in the article body and is not yet included in DBpedia. In this paper we focus on the extraction of historical events from Wikipedia articles, which are available for about 2,500 years and in different languages. We have extracted about 121,000 events with more than 325,000 links to DBpedia entities and provide access to this data via a Web API, a SPARQL endpoint, a Linked Data Interface, and a timeline application.}
}
