{"_id":"jiogruFqSHdzS5WLw","bibbaseid":"techtalkstv-bestpapersbertpretrainingofdeepbidirectionaltransformersforlanguageunderstanding-2019","author_short":["TechTalksTV"],"bibdata":{"bibtype":"misc","type":"misc","title":"BEST PAPERS: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding","shorttitle":"BEST PAPERS","url":"https://vimeo.com/365139010","abstract":"This is \"BEST PAPERS: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding\" by TechTalksTV on Vimeo, the home for high quality…","language":"en","urldate":"2023-07-28","author":[{"firstnames":[],"propositions":[],"lastnames":["TechTalksTV"],"suffixes":[]}],"month":"October","year":"2019","keywords":"#NLP, #Transformer, #Youtube, /unread","bibtex":"@misc{techtalkstv_best_2019,\n\ttitle = {{BEST} {PAPERS}: {BERT}: {Pre}-training of {Deep} {Bidirectional} {Transformers} for {Language} {Understanding}},\n\tshorttitle = {{BEST} {PAPERS}},\n\turl = {https://vimeo.com/365139010},\n\tabstract = {This is \\\"BEST PAPERS: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding\\\" by TechTalksTV on Vimeo, the home for high quality\\…},\n\tlanguage = {en},\n\turldate = {2023-07-28},\n\tauthor = {TechTalksTV},\n\tmonth = oct,\n\tyear = {2019},\n\tkeywords = {\\#NLP, \\#Transformer, \\#Youtube, /unread},\n}\n\n\n\n","author_short":["TechTalksTV"],"key":"techtalkstv_best_2019","id":"techtalkstv_best_2019","bibbaseid":"techtalkstv-bestpapersbertpretrainingofdeepbidirectionaltransformersforlanguageunderstanding-2019","role":"author","urls":{"Paper":"https://vimeo.com/365139010"},"keyword":["#NLP","#Transformer","#Youtube","/unread"],"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"misc","biburl":"https://bibbase.org/zotero/zzhenry2012","dataSources":["nZHrFJKyxKKDaWYM8"],"keywords":["#nlp","#transformer","#youtube","/unread"],"search_terms":["best","papers","bert","pre","training","deep","bidirectional","transformers","language","understanding","techtalkstv"],"title":"BEST PAPERS: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding","year":2019}