2013-06-12 20:41:44 -04:00
|
|
|
package common
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
2013-07-14 02:50:34 -04:00
|
|
|
"crypto/md5"
|
2013-07-14 03:13:07 -04:00
|
|
|
"crypto/sha1"
|
|
|
|
"crypto/sha256"
|
2013-08-28 12:09:37 -04:00
|
|
|
"crypto/sha512"
|
2013-06-12 20:41:44 -04:00
|
|
|
"encoding/hex"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"hash"
|
|
|
|
"io"
|
2013-06-28 22:34:43 -04:00
|
|
|
"log"
|
2013-06-12 20:41:44 -04:00
|
|
|
"net/http"
|
|
|
|
"net/url"
|
|
|
|
"os"
|
2018-01-03 17:34:11 -05:00
|
|
|
"runtime"
|
2013-06-12 20:41:44 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
// DownloadConfig is the configuration given to instantiate a new
// download instance. Once a configuration is used to instantiate
// a download client, it must not be modified.
type DownloadConfig struct {
	// The source URL in the form of a string.
	Url string

	// This is the path to download the file to.
	TargetPath string

	// DownloaderMap maps a schema to a Download. The key is the URL
	// scheme (e.g. "http", "https") and the value is the Downloader
	// used to fetch URLs with that scheme.
	DownloaderMap map[string]Downloader

	// If true, this will copy even a local file to the target
	// location. If false, then it will "download" the file by just
	// returning the local path to the file.
	CopyFile bool

	// The hashing implementation to use to checksum the downloaded file.
	Hash hash.Hash

	// The checksum for the downloaded file. The hash implementation configuration
	// for the downloader will be used to verify with this checksum after
	// it is downloaded.
	Checksum []byte

	// What to use for the user agent for HTTP requests. If set to "", use the
	// default user agent provided by Go.
	UserAgent string
}
|
|
|
|
|
|
|
|
// A DownloadClient helps download, verify checksums, etc.
type DownloadClient struct {
	// config holds the settings this client was created with; it must
	// not be modified after the client is instantiated.
	config *DownloadConfig

	// downloader is the scheme-specific Downloader selected during
	// Get; it stays nil until a remote download has actually started.
	downloader Downloader
}
|
|
|
|
|
2013-07-14 02:50:34 -04:00
|
|
|
// HashForType returns the Hash implementation for the given string
// type, or nil if the type is not supported.
func HashForType(t string) hash.Hash {
	// Constructors for every supported checksum algorithm, keyed by
	// the lowercase algorithm name. Storing constructors (not hashes)
	// guarantees each call hands back a fresh, unshared hash.Hash.
	constructors := map[string]func() hash.Hash{
		"md5":    md5.New,
		"sha1":   sha1.New,
		"sha256": sha256.New,
		"sha512": sha512.New,
	}

	newHash, ok := constructors[t]
	if !ok {
		return nil
	}
	return newHash()
}
|
|
|
|
|
2013-06-12 20:41:44 -04:00
|
|
|
// NewDownloadClient returns a new DownloadClient for the given
|
|
|
|
// configuration.
|
|
|
|
func NewDownloadClient(c *DownloadConfig) *DownloadClient {
|
|
|
|
if c.DownloaderMap == nil {
|
|
|
|
c.DownloaderMap = map[string]Downloader{
|
2014-01-09 11:41:34 -05:00
|
|
|
"http": &HTTPDownloader{userAgent: c.UserAgent},
|
|
|
|
"https": &HTTPDownloader{userAgent: c.UserAgent},
|
2013-06-12 20:41:44 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return &DownloadClient{config: c}
|
|
|
|
}
|
|
|
|
|
|
|
|
// A downloader is responsible for actually taking a remote URL and
// downloading it.
type Downloader interface {
	// Cancel requests that an in-progress download stop.
	Cancel()

	// Download writes the contents located at the URL into the given
	// open file, returning any error encountered.
	Download(*os.File, *url.URL) error

	// Progress returns the number of bytes downloaded so far.
	Progress() uint

	// Total returns the expected total size of the download in bytes.
	Total() uint
}
|
|
|
|
|
|
|
|
// Cancel is intended to stop an in-progress download; it is currently
// a no-op.
func (d *DownloadClient) Cancel() {
	// TODO(mitchellh): Implement
}
|
|
|
|
|
|
|
|
// Get downloads the configured URL to the target path, verifying the
// configured checksum (if any) afterwards. It returns the path to the
// resulting file: normally config.TargetPath, but for "file:" URLs
// with CopyFile disabled it is the original local path.
func (d *DownloadClient) Get() (string, error) {
	// If we already have the file and it matches, then just return the target path.
	if verify, _ := d.VerifyChecksum(d.config.TargetPath); verify {
		log.Println("[DEBUG] Initial checksum matched, no download needed.")
		return d.config.TargetPath, nil
	}

	u, err := url.Parse(d.config.Url)
	if err != nil {
		return "", err
	}

	log.Printf("Parsed URL: %#v", u)

	// Files when we don't copy the file are special cased.
	var f *os.File
	var finalPath string
	// sourcePath is non-empty only for the local-file fast path; it
	// marks a file we must never delete on checksum mismatch.
	sourcePath := ""
	if u.Scheme == "file" && !d.config.CopyFile {
		// This is special case for relative path in this case user specify
		// file:../ and after parse destination goes to Opaque
		if u.Path != "" {
			// If url.Path is set just use this
			finalPath = u.Path
		} else if u.Opaque != "" {
			// otherwise try url.Opaque
			finalPath = u.Opaque
		}
		// This is a special case where we use a source file that already exists
		// locally and we don't make a copy. Normally we would copy or download.
		log.Printf("[DEBUG] Using local file: %s", finalPath)

		// Remove forward slash on absolute Windows file URLs before processing
		// (url.Parse of file:///C:/x yields a Path of "/C:/x").
		if runtime.GOOS == "windows" && len(finalPath) > 0 && finalPath[0] == '/' {
			finalPath = finalPath[1:]
		}

		// Keep track of the source so we can make sure not to delete this later
		sourcePath = finalPath
		if _, err = os.Stat(finalPath); err != nil {
			return "", err
		}
	} else {
		finalPath = d.config.TargetPath

		var ok bool
		d.downloader, ok = d.config.DownloaderMap[u.Scheme]
		if !ok {
			return "", fmt.Errorf("No downloader for scheme: %s", u.Scheme)
		}

		// Otherwise, download using the downloader.
		f, err = os.OpenFile(finalPath, os.O_RDWR|os.O_CREATE, os.FileMode(0666))
		if err != nil {
			return "", err
		}

		log.Printf("[DEBUG] Downloading: %s", u.String())
		err = d.downloader.Download(f, u)
		// Close before checksum verification below re-opens the file.
		f.Close()
		if err != nil {
			return "", err
		}
	}

	if d.config.Hash != nil {
		var verify bool
		verify, err = d.VerifyChecksum(finalPath)
		if err == nil && !verify {
			// Only delete the file if we made a copy or downloaded it
			if sourcePath != finalPath {
				os.Remove(finalPath)
			}

			err = fmt.Errorf(
				"checksums didn't match expected: %s",
				hex.EncodeToString(d.config.Checksum))
		}
	}

	return finalPath, err
}
|
|
|
|
|
|
|
|
// PercentProgress returns the download progress as a percentage.
|
2013-08-14 11:15:25 -04:00
|
|
|
func (d *DownloadClient) PercentProgress() int {
|
2013-06-28 22:34:43 -04:00
|
|
|
if d.downloader == nil {
|
2013-08-14 11:15:25 -04:00
|
|
|
return -1
|
2013-06-28 22:34:43 -04:00
|
|
|
}
|
|
|
|
|
2013-08-14 11:15:25 -04:00
|
|
|
return int((float64(d.downloader.Progress()) / float64(d.downloader.Total())) * 100)
|
2013-06-12 20:41:44 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
// VerifyChecksum tests that the path matches the checksum for the
|
|
|
|
// download.
|
|
|
|
func (d *DownloadClient) VerifyChecksum(path string) (bool, error) {
|
|
|
|
if d.config.Checksum == nil || d.config.Hash == nil {
|
|
|
|
return false, errors.New("Checksum or Hash isn't set on download.")
|
|
|
|
}
|
|
|
|
|
|
|
|
f, err := os.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
|
|
|
defer f.Close()
|
|
|
|
|
2013-06-28 22:34:43 -04:00
|
|
|
log.Printf("Verifying checksum of %s", path)
|
2013-06-12 20:41:44 -04:00
|
|
|
d.config.Hash.Reset()
|
|
|
|
io.Copy(d.config.Hash, f)
|
2017-03-28 21:29:55 -04:00
|
|
|
return bytes.Equal(d.config.Hash.Sum(nil), d.config.Checksum), nil
|
2013-06-12 20:41:44 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
// HTTPDownloader is an implementation of Downloader that downloads
// files over HTTP.
type HTTPDownloader struct {
	// progress is the number of bytes written to the destination so
	// far, including any pre-existing bytes when a download resumes.
	progress uint

	// total is the expected final size of the download in bytes.
	total uint

	// userAgent, when non-empty, is sent as the User-Agent header on
	// outgoing requests; empty means Go's default user agent.
	userAgent string
}
|
|
|
|
|
|
|
|
// Cancel is intended to stop an in-progress download; it is currently
// a no-op.
func (*HTTPDownloader) Cancel() {
	// TODO(mitchellh): Implement
}
|
|
|
|
|
2015-06-15 02:43:25 -04:00
|
|
|
func (d *HTTPDownloader) Download(dst *os.File, src *url.URL) error {
|
2013-07-07 15:16:31 -04:00
|
|
|
log.Printf("Starting download: %s", src.String())
|
2015-06-22 15:14:35 -04:00
|
|
|
|
|
|
|
// Seek to the beginning by default
|
|
|
|
if _, err := dst.Seek(0, 0); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2015-06-22 17:59:38 -04:00
|
|
|
// Reset our progress
|
|
|
|
d.progress = 0
|
|
|
|
|
2015-06-22 15:14:35 -04:00
|
|
|
// Make the request. We first make a HEAD request so we can check
|
|
|
|
// if the server supports range queries. If the server/URL doesn't
|
|
|
|
// support HEAD requests, we just fall back to GET.
|
2015-06-15 02:43:25 -04:00
|
|
|
req, err := http.NewRequest("HEAD", src.String(), nil)
|
2013-08-18 14:34:36 -04:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2014-01-09 11:41:34 -05:00
|
|
|
if d.userAgent != "" {
|
|
|
|
req.Header.Set("User-Agent", d.userAgent)
|
|
|
|
}
|
|
|
|
|
2013-08-18 14:34:36 -04:00
|
|
|
httpClient := &http.Client{
|
|
|
|
Transport: &http.Transport{
|
|
|
|
Proxy: http.ProxyFromEnvironment,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
resp, err := httpClient.Do(req)
|
2015-06-22 15:14:35 -04:00
|
|
|
if err == nil && (resp.StatusCode >= 200 && resp.StatusCode < 300) {
|
|
|
|
// If the HEAD request succeeded, then attempt to set the range
|
|
|
|
// query if we can.
|
|
|
|
if resp.Header.Get("Accept-Ranges") == "bytes" {
|
|
|
|
if fi, err := dst.Stat(); err == nil {
|
|
|
|
if _, err = dst.Seek(0, os.SEEK_END); err == nil {
|
|
|
|
req.Header.Set("Range", fmt.Sprintf("bytes=%d-", fi.Size()))
|
|
|
|
d.progress = uint(fi.Size())
|
|
|
|
}
|
2015-06-15 18:04:48 -04:00
|
|
|
}
|
2015-06-15 02:43:25 -04:00
|
|
|
}
|
2013-07-07 15:16:31 -04:00
|
|
|
}
|
|
|
|
|
2015-06-22 15:14:35 -04:00
|
|
|
// Set the request to GET now, and redo the query to download
|
2015-06-15 18:04:48 -04:00
|
|
|
req.Method = "GET"
|
2015-06-15 02:43:25 -04:00
|
|
|
|
|
|
|
resp, err = httpClient.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2015-06-22 17:58:27 -04:00
|
|
|
d.total = d.progress + uint(resp.ContentLength)
|
2013-06-12 20:41:44 -04:00
|
|
|
var buffer [4096]byte
|
|
|
|
for {
|
|
|
|
n, err := resp.Body.Read(buffer[:])
|
|
|
|
if err != nil && err != io.EOF {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
d.progress += uint(n)
|
|
|
|
|
|
|
|
if _, werr := dst.Write(buffer[:n]); werr != nil {
|
|
|
|
return werr
|
|
|
|
}
|
|
|
|
|
|
|
|
if err == io.EOF {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// Progress returns the number of bytes downloaded so far.
func (d *HTTPDownloader) Progress() uint {
	return d.progress
}
|
|
|
|
|
|
|
|
// Total returns the expected total size of the download in bytes.
func (d *HTTPDownloader) Total() uint {
	return d.total
}
|