I'm not sure how you want to write the CSV, but here is how you can get all the tables between these dates. I tested this on the first few URLs and it worked well. Please note that you do not need to parse the HTML before reading the table, as readHTMLTable() is able to read and parse directly from the URL.
# Scrape the hockey-reference "daily leaders" tables for every date in a range.
library(XML)
library(RCurl)

# Every date from 2014-10-12 through 2015-06-18, inclusive.
x <- seq(as.Date("2014-10-12"), as.Date("2015-06-18"), by = "day")

# URL template; month, day, and year are substituted per date.
utmp <- "http://www.hockey-reference.com/friv/dailyleaders.cgi?month=%d&day=%d&year=%d"

# Split each "YYYY-MM-DD" string into an integer matrix whose columns are
# year, month, day (type.convert turns the character pieces into integers).
m <- do.call(rbind, lapply(strsplit(as.character(x), "-"), type.convert))

# Preallocate one list slot per date.
# Bug fix: the original used length(allurls), but `allurls` is never defined
# in this script; nrow(m) is the number of dates we iterate over.
tables <- vector("list", nrow(m))

for (i in seq_len(nrow(m))) {
  u <- sprintf(utmp, m[i, 2], m[i, 3], m[i, 1])
  # Only read pages that actually exist; dates with no page are left NULL
  # so a missing day does not abort the whole loop.
  tables[[i]] <- if (url.exists(u)) {
    readHTMLTable(u, stringsAsFactors = FALSE)
  } else {
    NULL
  }
}
The output of str() is quite long, so here is a quick look at the dimensions of the first element:
# Show the dimensions (rows, columns) of each table scraped for the first
# date — a compact alternative to printing the full str() output.
lapply(tables[[1]], dim)
# $skaters
# [1] 72 23
#
# $goalies
# [1] 7 15
The for() loop first checks whether each URL exists; if it does, the page's tables are read, and otherwise the corresponding list element is left as NULL. That way, dates with no page do not stop the loop, and you can simply drop the NULL elements afterwards.