Thanks to Stedy's suggestion, I found a solution. Note that there are two similarly named Wikipedia interface packages for R; this solution uses WikipediR, not WikipediaR.
# library() errors immediately if a package is absent, unlike require(),
# which only returns FALSE and lets the script fail later.
library(WikipediR) # MediaWiki API client (note: WikipediR, NOT WikipediaR)
library(rvest)     # HTML scraping

# Arguments to getwikipic():
#   titles   - character vector of Wikipedia page name(s)
#   res      - desired image width in pixels (Wikipedia thumbnails are 220 px by default)
#   savedest - save destination directory, with terminal '/'; working directory by default
#' Download the lead (infobox) image of one or more Wikipedia pages.
#'
#' @param titles Character vector of Wikipedia page name(s).
#' @param res Desired image width in pixels (Wikipedia serves 220 px
#'   thumbnails by default).
#' @param savedest Save destination directory, including the terminal "/";
#'   if NA (the default), files are written to the working directory.
#' @return A list (one element per title) of strings reporting the original
#'   filename on Wikipedia; images are saved as "<title>.jpg" as a side effect.
getwikipic <- function(titles, res = 220, savedest = NA) {
  lapply(titles, function(ttl) {
    # Resolve the canonical page URL via the MediaWiki API.
    d <- page_info("en", "wikipedia", page = ttl, clean_response = TRUE)
    url <- d[[1]]$fullurl

    # Grab the first image in the page's second table row (the infobox image).
    # NOTE(review): html_session() was deprecated in rvest 1.0 in favour of
    # session() -- kept here for compatibility with older rvest versions.
    wikipage <- html_session(url)
    imginfo <- wikipage %>% html_nodes("tr:nth-child(2) img")
    img.url <- imginfo[1] %>% html_attr("src")
    img.url <- paste0("https:", img.url) # src attrs are protocol-relative

    # Build the output path; files are always saved with a .jpg extension,
    # regardless of the source image's actual format.
    if (is.na(savedest)) {
      savefilename <- paste0(ttl, ".jpg")
    } else {
      savefilename <- paste0(savedest, ttl, ".jpg")
    }

    # Request a different thumbnail width by rewriting only the "/220px-"
    # path segment. A bare gsub(220, res, ...) would also clobber any other
    # occurrence of "220" in the URL (e.g. inside the page title or hash).
    if (res != 220) {
      img.url <- gsub("/220px-", paste0("/", res, "px-"), img.url, fixed = TRUE)
    }

    # mode = "wb" is required so binary image data isn't corrupted on Windows.
    download.file(img.url, savefilename, mode = "wb")

    # Report the original filename so the user can identify the source image.
    paste0("orig.file: ", basename(img.url))
  })
}
Alternatively, I have created a GitHub repository containing this code, so you can source and run it directly in R:
# Source the getwikipic() function straight from its GitHub repository,
# then fetch a 1024 px-wide image for the "numbat" Wikipedia page.
devtools::source_url("https://raw.githubusercontent.com/drwilkins/getwikipic/master/getwikipic.R")
pages <- c("numbat")
getwikipic(pages, 1024)
This downloads the image to your working directory:
pic