diff --git a/aksw.bib b/aksw.bib
index 7634e24..b8af11e 100644
--- a/aksw.bib
+++ b/aksw.bib
@@ -9502,18 +9502,17 @@ @InProceedings{stadler-www
 }
 
 @InProceedings{stadler-c-2010--a,
- Title = {{U}pdate {S}trategies for {DB}pedia {L}ive},
- Author = {Claus Stadler and Michael Martin and Jens Lehmann and Sebastian Hellmann},
- Booktitle = {6th Workshop on Scripting and Development for the Semantic Web Colocated with ESWC 2010 30th or 31st May, 2010 Crete, Greece},
- Year = {2010},
-
- Abstract = {Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable.
-In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.},
- Bdsk-url-1 = {http://www.semanticscripting.org/SFSW2010/papers/sfsw2010_submission_5.pdf},
- Date-modified = {2012-12-02 12:30:10 +0000},
- Keywords = {sys:relevantFor:bis sys:relevantFor:infai martin stadler hellmann kilt lehmann dbpedia event_sfsw 2010 group_aksw peer-reviewed ontowiki_eu MOLE},
- Owner = {michael},
- Timestamp = {2010.06.24},
- Url = {http://www.semanticscripting.org/SFSW2010/papers/sfsw2010_submission_5.pdf}
+ author = {Claus Stadler and Michael Martin and Jens Lehmann and Sebastian Hellmann},
+ booktitle = {6th Workshop on Scripting and Development for the Semantic Web Colocated with ESWC 2010 30th or 31st May, 2010 Crete, Greece},
+ title = {{Update} {Strategies} for {DBpedia} {Live}},
+ year = {2010},
+ abstract = {Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable.
+In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.},
+ bdsk-url-1 = {http://jens-lehmann.org/files/2010/dbpedia_live_eswc.pdf},
+ date-modified = {2012-12-02 12:30:10 +0000},
+ keywords = {sys:relevantFor:bis sys:relevantFor:infai martin stadler hellmann kilt lehmann dbpedia event_sfsw 2010 group_aksw peer-reviewed ontowiki_eu MOLE},
+ owner = {michael},
+ timestamp = {2010.06.24},
+ url = {http://jens-lehmann.org/files/2010/dbpedia_live_eswc.pdf},
 }
 @InProceedings{stadler2018containment,