id,title,author,source_link,archive_link,publication_year
1,Improving Language Understanding by Generative Pre-Training,"[""Alec Radford"",""Karthik Narasimhan"",""Tim Salimans"",""Ilya Sutskever""]",https://cdn.openai.com/research-covers/language-unsupervised/language_understanding_paper.pdf,,2018
2,Language Models are Unsupervised Multitask Learners,"[""Alec Radford"",""Jeffrey Wu"",""Rewon Child"",""David Luan"",""Dario Amodei"",""Ilya Sutskever""]",https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf,,2019
3,Language Models are Few-Shot Learners,"[""Tom B. Brown"",""Benjamin Mann"",""Nick Ryder"",""Melanie Subbiah"",""Jared Kaplan"",""Prafulla Dhariwal"",""Arvind Neelakantan"",""Pranav Shyam"",""Girish Sastry"",""Amanda Askell"",""Sandhini Agarwal"",""Ariel Herbert-Voss"",""Gretchen Krueger"",""Tom Henighan"",""Rewon Child"",""Aditya Ramesh"",""Daniel M. Ziegler"",""Jeffrey Wu"",""Clemens Winter"",""Christopher Hesse"",""Mark Chen"",""Eric Sigler"",""Mateusz Litwin"",""Scott Gray"",""Benjamin Chess"",""Jack Clark"",""Christopher Berner"",""Sam McCandlish"",""Alec Radford"",""Ilya Sutskever"",""Dario Amodei""]",https://arxiv.org/pdf/2005.14165,,2020