The inputs in this problem span very different scales, so you should normalize or scale them first. There are differences between scaling and normalizing that will affect your results; there is a separate question about this on SO:
normalize inputs
# Min-max normalization: linearly rescale a numeric vector so that its
# smallest value maps to 0 and its largest to 1.
norm.fun <- function(x) {
  rng <- range(x)
  (x - rng[1]) / (rng[2] - rng[1])
}
# Load packages: ggplot2 supplies the mpg data set, neuralnet fits the model.
# library() errors immediately when a package is missing, unlike require(),
# which merely returns FALSE and lets the script fail later.
library(ggplot2)
library(neuralnet)
data = mpg[, c('cty', 'displ', 'year', 'cyl', 'hwy')]
# Min-max normalize every column to [0, 1]; all selected columns are
# numeric, so the matrix coercion done by apply() is harmless here.
data.norm = apply(data, 2, norm.fun)
net = neuralnet(cty ~ displ + year + cyl + hwy, data.norm, hidden = 2)
Then you can denormalize the predictions, mapping them back to the original scale of the response:
# Denormalize the fitted values back to the original cty scale:
#   y = min + normalized * (max - min)
# diff(range(x)) yields the scalar span max - min. The original code
# multiplied by range(x) itself, which is the length-2 vector c(min, max)
# and gets silently recycled, producing wrong denormalized values.
y.net = min(data[, 'cty']) + net$net.result[[1]] * diff(range(data[, 'cty']))
plot(data[, 'cty'], col = 'red')
points(y.net)

scale inputs
# Standardize every column (z-scores: subtract the mean, divide by the sd)
# and fit the same network on the scaled data.
data.scaled <- scale(data)
net <- neuralnet(cty ~ displ + year + cyl + hwy, data.scaled, hidden = 2)
# Undo the standardization on the response: the fitted values are on the
# z-score scale, so multiply by the sd and add back the mean of cty.
y.sd <- sd(data[, 'cty'])
y.mean <- mean(data[, 'cty'])
y.net <- net$net.result[[1]] * y.sd + y.mean
plot(data[, 'cty'], col = 'red')
points(y.net)

You can also try the nnet package, it is very fast:
# Single-hidden-layer network from the nnet package — typically much faster.
# library() errors immediately when the package is missing, unlike require().
library(nnet)
data2 = mpg
# Only year is rescaled here — presumably because its raw values (~2000)
# dwarf the other predictors; consider scaling all predictors as well.
data2$year = scale(data2$year)
# linout = TRUE requests a linear output unit (regression); the default
# logistic output is only appropriate for classification.
fit = nnet(cty ~ displ + year + cyl + hwy, size = 10, data = data2, linout = TRUE)
plot(mpg$cty)
points(fit$fitted.values, col = 'red')