I have a CSV file with ~10k URLs that I need to make HTTP GET requests to. What is the easiest way to limit the concurrency of goroutines so that no more than 16 run at a time?
This is what I have so far (the URL is in the second column of each CSV record). Note that `getUrl` should use its `url` parameter rather than `each[1]`, and nothing here limits or waits for the goroutines, so `main` can exit before the requests finish:

    package main

    import (
        "encoding/csv"
        "log"
        "os"

        "github.com/parnurzeal/gorequest"
    )

    func getUrl(url string) {
        request := gorequest.New()
        resp, body, errs := request.Get(url).End()
        _ = resp
        _ = body
        _ = errs
    }

    func main() {
        csvfile, err := os.Open("urls.csv")
        if err != nil {
            log.Fatal(err)
        }
        defer csvfile.Close()

        reader := csv.NewReader(csvfile)
        reader.FieldsPerRecord = -1
        rawCSVdata, err := reader.ReadAll()
        if err != nil {
            log.Fatal(err)
        }

        completed := 0
        for _, each := range rawCSVdata {
            go getUrl(each[1]) // unbounded: one goroutine per URL
            completed++        // counts launched goroutines, not finished requests
        }
    }
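One common way to cap concurrency is a buffered channel used as a counting semaphore, combined with a sync.WaitGroup so main waits for all requests to finish. The sketch below is a minimal example under the same assumptions as the code above (urls.csv with the URL in the second column, gorequest for the requests); the name sem and the error logging are illustrative, not part of the original code:

    package main

    import (
        "encoding/csv"
        "log"
        "os"
        "sync"

        "github.com/parnurzeal/gorequest"
    )

    func getUrl(url string) {
        request := gorequest.New()
        _, _, errs := request.Get(url).End()
        if len(errs) > 0 {
            log.Printf("GET %s failed: %v", url, errs)
        }
    }

    func main() {
        csvfile, err := os.Open("urls.csv")
        if err != nil {
            log.Fatal(err)
        }
        defer csvfile.Close()

        reader := csv.NewReader(csvfile)
        reader.FieldsPerRecord = -1
        rawCSVdata, err := reader.ReadAll()
        if err != nil {
            log.Fatal(err)
        }

        // Buffered channel as a counting semaphore: at most 16 slots in use.
        sem := make(chan struct{}, 16)
        var wg sync.WaitGroup

        for _, each := range rawCSVdata {
            url := each[1] // capture the per-iteration value for the goroutine
            wg.Add(1)
            sem <- struct{}{} // blocks while 16 requests are already in flight
            go func() {
                defer wg.Done()
                defer func() { <-sem }() // release the slot when this request finishes
                getUrl(url)
            }()
        }

        wg.Wait() // don't let main exit before the last requests complete
    }

An equivalent pattern is to start exactly 16 worker goroutines that read URLs from a shared channel; the semaphore version is shown here because it keeps the original loop structure intact.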