package common

import (
	"encoding/hex"
	"errors"
	"fmt"
	"log"
	"net/url"
	"os"
	"strings"

	getter "github.com/hashicorp/go-getter"
	"github.com/hashicorp/packer/template/interpolate"
)

// ISOConfig contains configuration for downloading ISO images.
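//
// The mapstructure tags are the keys a template uses to set these fields; a
// minimal, purely illustrative snippet might look like:
//
//	"iso_url":           "http://example.com/example.iso",
//	"iso_checksum_type": "sha256",
//	"iso_checksum":      "<sha256 of example.iso>",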
type ISOConfig struct {
	ISOChecksum     string   `mapstructure:"iso_checksum"`
	ISOChecksumURL  string   `mapstructure:"iso_checksum_url"`
	ISOChecksumType string   `mapstructure:"iso_checksum_type"`
	ISOUrls         []string `mapstructure:"iso_urls"`
	TargetPath      string   `mapstructure:"iso_target_path"`
	TargetExtension string   `mapstructure:"iso_target_extension"`
	RawSingleISOUrl string   `mapstructure:"iso_url"`
}
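
// Prepare validates and normalizes the ISO configuration: it folds iso_url
// into iso_urls, lower-cases the checksum type and target extension, routes
// iso_checksum_url through go-getter's checksum-file handling, and returns
// any warnings and errors it finds. A minimal, hypothetical call site:
//
//	var cfg ISOConfig
//	warnings, errs := cfg.Prepare(&interpolate.Context{})
//	for _, w := range warnings {
//		log.Println(w)
//	}
//	if len(errs) > 0 {
//		// reject the configuration
//	}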
func (c *ISOConfig) Prepare(ctx *interpolate.Context) (warnings []string, errs []error) {
	if len(c.ISOUrls) != 0 && c.RawSingleISOUrl != "" {
		errs = append(
			errs, errors.New("Only one of iso_url or iso_urls must be specified"))
		return
	}

	if c.RawSingleISOUrl != "" {
		// make sure only array is set
		c.ISOUrls = append([]string{c.RawSingleISOUrl}, c.ISOUrls...)
		c.RawSingleISOUrl = ""
	}
	if len(c.ISOUrls) == 0 {
		errs = append(
			errs, errors.New("One of iso_url or iso_urls must be specified"))
		return
	}

	c.ISOChecksumType = strings.ToLower(c.ISOChecksumType)

	if c.TargetExtension == "" {
		c.TargetExtension = "iso"
	}
	c.TargetExtension = strings.ToLower(c.TargetExtension)

	// Warnings
	if c.ISOChecksumType == "none" {
		warnings = append(warnings,
			"A checksum type of 'none' was specified. Since ISO files are so big,\n"+
				"a checksum is highly recommended.")
		return warnings, errs
	}
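
	// When a checksum file URL is given instead of a literal checksum, reject
	// obvious mistakes (an .iso is not a checksum file) and defer the actual
	// parsing to go-getter by treating the value as a "file" checksum.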
	if c.ISOChecksumURL != "" {
		if strings.HasSuffix(strings.ToLower(c.ISOChecksumURL), ".iso") {
			errs = append(errs, fmt.Errorf("Error parsing checksum:"+
				" .iso is not a valid checksum extension"))
		}
		// go-getter auto-parses checksum files
		c.ISOChecksumType = "file"
		c.ISOChecksum = c.ISOChecksumURL
	}

	if c.ISOChecksum == "" {
		errs = append(errs, fmt.Errorf("A checksum must be specified"))
	}
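
	// With a "file" checksum, fetch and parse the checksum file via go-getter,
	// pick the entry matching the first ISO URL, and store the resolved type
	// and hex-encoded value back on the config.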
	if c.ISOChecksumType == "file" {
		u, err := url.Parse(c.ISOUrls[0])
		if err != nil {
			errs = append(errs, fmt.Errorf("error parsing URL <%s>: %s", c.ISOUrls[0], err))
			return warnings, errs
		}
		wd, err := os.Getwd()
		if err != nil {
			log.Printf("get working directory: %v", err)
			// here we ignore the error in case the
			// working directory is not needed.
		}
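
		// This client exists only to resolve the checksum file; Dst is a
		// placeholder since no ISO is downloaded to a destination path here.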
		gc := getter.Client{
			Dst:     "no-op",
			Src:     u.String(),
			Pwd:     wd,
			Dir:     false,
			Getters: getter.Getters,
		}
		cksum, err := gc.ChecksumFromFile(c.ISOChecksumURL, u)
		if cksum == nil || err != nil {
			errs = append(errs, fmt.Errorf("Couldn't extract checksum from checksum file"))
		} else {
			c.ISOChecksumType = cksum.Type
			c.ISOChecksum = hex.EncodeToString(cksum.Value)
		}
	}

	return warnings, errs
}