Limited concurrent HTTP downloads

Download files from HTTP using a limited number of concurrent requests.

6

Votes

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
    "time"
)

func downloadAll(urls chan string, resps chan []byte, errs chan error) {
    for url := range urls {
        resp, err := download(url)
        if err != nil {
            errs <- err
        }
        resps <- resp
    }
}

func download(url string) ([]byte, error) {
    resp, err := http.Get(url)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()
    b, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }
    time.Sleep(5 * time.Second)
    return b, nil
}

func main() {
    sources := []string{"https://paddy.carvers.co", "https://www.google.com",
        "https://dstaley.com", "https://golang.org",
        "https://getunitrack.com"}
    results := make(chan []byte)
    urls := make(chan string)
    errs := make(chan error)
    for i := 0; i < 3; i++ {
        go downloadAll(urls, results, errs)
    }
    go func(in []string, out chan string) {
        for _, i := range in {
            out <- i
        }
        close(out)
    }(sources, urls)
    var numResults int
    for {
        select {
        case result := <-results:
            fmt.Println(string(result))
            numResults++
            if numResults >= len(sources) {
                return
            }
        case err := <-errs:
            fmt.Println(err)
            return
        }
    }
}

Vote Here

You must earn at least 1 vote on your snippets to be allowed to vote

Terms Of Use

Privacy Policy

Featured snippets are MIT license

Gears & Masters

Advertise

DevOpsnipp.com © 2020

medium.png