2019-06-12 10:02:49 -04:00
|
|
|
//go:generate struct-markdown
|
|
|
|
|
2015-10-20 19:27:47 -04:00
|
|
|
package common
|
|
|
|
|
|
|
|
import (
|
2020-02-06 11:51:15 -05:00
|
|
|
"context"
|
2019-06-27 17:29:25 -04:00
|
|
|
"encoding/hex"
|
2015-10-20 19:27:47 -04:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
2020-05-11 05:14:50 -04:00
|
|
|
"log"
|
|
|
|
"os"
|
2015-10-20 19:27:47 -04:00
|
|
|
"strings"
|
|
|
|
|
2020-05-11 05:14:50 -04:00
|
|
|
urlhelper "github.com/hashicorp/go-getter/v2/helper/url"
|
|
|
|
|
|
|
|
"github.com/hashicorp/go-getter/v2"
|
|
|
|
|
2017-04-04 16:39:01 -04:00
|
|
|
"github.com/hashicorp/packer/template/interpolate"
|
2015-10-20 19:27:47 -04:00
|
|
|
)
|
|
|
|
|
2019-06-12 10:02:02 -04:00
|
|
|
// By default, Packer will symlink, download or copy image files to the Packer
// cache into a "`hash($iso_url+$iso_checksum).$iso_target_extension`" file.
// Packer uses [hashicorp/go-getter](https://github.com/hashicorp/go-getter) in
// file mode in order to perform a download.
//
// go-getter supports the following protocols:
//
// * Local files
// * Git
// * Mercurial
// * HTTP
// * Amazon S3
//
// Examples:
// go-getter can guess the checksum type based on `iso_checksum` len.
//
// ```json
// {
//   "iso_checksum": "946a6077af6f5f95a51f82fdc44051c7aa19f9cfc5f737954845a6050543d7c2",
//   "iso_url": "ubuntu.org/.../ubuntu-14.04.1-server-amd64.iso"
// }
// ```
//
// ```json
// {
//   "iso_checksum_type": "file",
//   "iso_checksum": "ubuntu.org/..../ubuntu-14.04.1-server-amd64.iso.sum",
//   "iso_url": "ubuntu.org/.../ubuntu-14.04.1-server-amd64.iso"
// }
// ```
//
// ```json
// {
//   "iso_checksum_url": "./shasums.txt",
//   "iso_url": "ubuntu.org/.../ubuntu-14.04.1-server-amd64.iso"
// }
// ```
//
// ```json
// {
//   "iso_checksum_type": "sha256",
//   "iso_checksum_url": "./shasums.txt",
//   "iso_url": "ubuntu.org/.../ubuntu-14.04.1-server-amd64.iso"
// }
// ```
//
type ISOConfig struct {
	// The checksum for the ISO file or virtual hard drive file. The algorithm
	// to use when computing the checksum can be optionally specified with
	// `iso_checksum_type`. When `iso_checksum_type` is not set packer will
	// guess the checksumming type based on `iso_checksum` length.
	// `iso_checksum` can also be a file or an URL, in which case
	// `iso_checksum_type` must be set to `file`; the go-getter will download
	// it and use the first hash found.
	ISOChecksum string `mapstructure:"iso_checksum" required:"true"`
	// An URL to a checksum file containing a checksum for the ISO file. At
	// least one of `iso_checksum` and `iso_checksum_url` must be defined.
	// `iso_checksum_url` will be ignored if `iso_checksum` is non empty.
	ISOChecksumURL string `mapstructure:"iso_checksum_url"`
	// The algorithm to be used when computing the checksum of the file
	// specified in `iso_checksum`. Currently, valid values are "", "none",
	// "md5", "sha1", "sha256", "sha512" or "file". Since the validity of ISO
	// and virtual disk files are typically crucial to a successful build,
	// Packer performs a check of any supplied media by default. While setting
	// "none" will cause Packer to skip this check, corruption of large files
	// such as ISOs and virtual hard drives can occur from time to time. As
	// such, skipping this check is not recommended. `iso_checksum_type` must
	// be set to `file` when `iso_checksum` is an url.
	ISOChecksumType string `mapstructure:"iso_checksum_type"`
	// A URL to the ISO containing the installation image or virtual hard drive
	// (VHD or VHDX) file to clone.
	RawSingleISOUrl string `mapstructure:"iso_url" required:"true"`
	// Multiple URLs for the ISO to download. Packer will try these in order.
	// If anything goes wrong attempting to download or while downloading a
	// single URL, it will move on to the next. All URLs must point to the same
	// file (same checksum). By default this is empty and `iso_url` is used.
	// Only one of `iso_url` or `iso_urls` can be specified.
	ISOUrls []string `mapstructure:"iso_urls"`
	// The path where the iso should be saved after download. By default will
	// go in the packer cache, with a hash of the original filename and
	// checksum as its name.
	TargetPath string `mapstructure:"iso_target_path"`
	// The extension of the iso file after download. This defaults to `iso`.
	TargetExtension string `mapstructure:"iso_target_extension"`
}
|
|
|
|
|
2020-02-06 11:51:15 -05:00
|
|
|
func (c *ISOConfig) Prepare(*interpolate.Context) (warnings []string, errs []error) {
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
if len(c.ISOUrls) != 0 && c.RawSingleISOUrl != "" {
|
2015-11-05 08:06:18 -05:00
|
|
|
errs = append(
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
errs, errors.New("Only one of iso_url or iso_urls must be specified"))
|
2016-10-14 03:56:05 -04:00
|
|
|
return
|
2015-11-05 08:06:18 -05:00
|
|
|
}
|
|
|
|
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
if c.RawSingleISOUrl != "" {
|
|
|
|
// make sure only array is set
|
|
|
|
c.ISOUrls = append([]string{c.RawSingleISOUrl}, c.ISOUrls...)
|
|
|
|
c.RawSingleISOUrl = ""
|
|
|
|
}
|
|
|
|
if len(c.ISOUrls) == 0 {
|
2015-10-20 19:27:47 -04:00
|
|
|
errs = append(
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
errs, errors.New("One of iso_url or iso_urls must be specified"))
|
|
|
|
return
|
2015-10-20 19:27:47 -04:00
|
|
|
}
|
|
|
|
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
c.ISOChecksumType = strings.ToLower(c.ISOChecksumType)
|
2015-10-20 19:27:47 -04:00
|
|
|
|
2016-12-17 05:49:54 -05:00
|
|
|
if c.TargetExtension == "" {
|
|
|
|
c.TargetExtension = "iso"
|
|
|
|
}
|
|
|
|
c.TargetExtension = strings.ToLower(c.TargetExtension)
|
|
|
|
|
2015-10-20 19:27:47 -04:00
|
|
|
// Warnings
|
|
|
|
if c.ISOChecksumType == "none" {
|
|
|
|
warnings = append(warnings,
|
|
|
|
"A checksum type of 'none' was specified. Since ISO files are so big,\n"+
|
|
|
|
"a checksum is highly recommended.")
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
return warnings, errs
|
2015-10-20 19:27:47 -04:00
|
|
|
}
|
|
|
|
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
if c.ISOChecksumURL != "" {
|
2019-06-28 18:29:39 -04:00
|
|
|
if c.ISOChecksum != "" {
|
|
|
|
warnings = append(warnings, "You have provided both an "+
|
|
|
|
"iso_checksum and an iso_checksum_url. Discarding the "+
|
|
|
|
"iso_checksum_url and using the checksum.")
|
|
|
|
} else {
|
|
|
|
if strings.HasSuffix(strings.ToLower(c.ISOChecksumURL), ".iso") {
|
|
|
|
errs = append(errs, fmt.Errorf("Error parsing checksum:"+
|
|
|
|
" .iso is not a valid checksum extension"))
|
|
|
|
}
|
|
|
|
// go-getter auto-parses checksum files
|
|
|
|
c.ISOChecksumType = "file"
|
|
|
|
c.ISOChecksum = c.ISOChecksumURL
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
}
|
2018-07-13 12:21:04 -04:00
|
|
|
}
|
|
|
|
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
if c.ISOChecksum == "" {
|
|
|
|
errs = append(errs, fmt.Errorf("A checksum must be specified"))
|
2017-11-05 18:32:39 -05:00
|
|
|
}
|
2019-06-27 17:29:25 -04:00
|
|
|
if c.ISOChecksumType == "file" {
|
2020-02-10 11:54:11 -05:00
|
|
|
if c.ISOChecksumURL != "" {
|
2020-05-11 05:14:50 -04:00
|
|
|
c.ISOChecksum = c.ISOChecksumURL
|
|
|
|
}
|
|
|
|
|
|
|
|
u, err := urlhelper.Parse(c.ISOUrls[0])
|
|
|
|
if err != nil {
|
|
|
|
return warnings, append(errs, fmt.Errorf("url parse: %s", err))
|
|
|
|
}
|
|
|
|
|
|
|
|
if cs := u.Query().Get("checksum"); cs != "" {
|
|
|
|
c.ISOChecksum = cs
|
|
|
|
}
|
|
|
|
if c.ISOChecksumType != "" && c.ISOChecksumType != "none" {
|
|
|
|
// add checksum to url query params as go getter will checksum for us
|
|
|
|
q := u.Query()
|
|
|
|
q.Set("checksum", c.ISOChecksumType+":"+c.ISOChecksum)
|
|
|
|
u.RawQuery = q.Encode()
|
|
|
|
} else if c.ISOChecksum != "" {
|
|
|
|
q := u.Query()
|
|
|
|
q.Set("checksum", c.ISOChecksum)
|
|
|
|
u.RawQuery = q.Encode()
|
|
|
|
}
|
|
|
|
|
|
|
|
wd, err := os.Getwd()
|
|
|
|
if err != nil {
|
|
|
|
log.Printf("get working directory: %v", err)
|
|
|
|
// here we ignore the error in case the
|
|
|
|
// working directory is not needed.
|
|
|
|
}
|
|
|
|
|
|
|
|
req := &getter.Request{
|
|
|
|
Src: u.String(),
|
|
|
|
Pwd: wd,
|
2019-10-23 12:34:32 -04:00
|
|
|
}
|
2020-05-11 05:14:50 -04:00
|
|
|
cksum, err := defaultGetterClient.GetChecksum(context.TODO(), req)
|
2019-06-27 17:29:25 -04:00
|
|
|
if err != nil {
|
2020-02-10 11:54:11 -05:00
|
|
|
errs = append(errs, fmt.Errorf("Couldn't extract checksum from checksum file: %v", err))
|
2019-06-28 18:29:39 -04:00
|
|
|
} else {
|
|
|
|
c.ISOChecksumType = cksum.Type
|
|
|
|
c.ISOChecksum = hex.EncodeToString(cksum.Value)
|
2019-06-27 17:29:25 -04:00
|
|
|
}
|
|
|
|
}
|
2017-11-05 18:32:39 -05:00
|
|
|
|
Use the hashicorp/go-getter to download files
* removed packer.Cache and references since packer.Cache is never used except in the download step. The download step now uses the new func packer.CachePath(targetPath) for this, the behavior is the same.
* removed download code from packer that was reimplemented into the go-getter library: progress bar, http download restart, checksuming from file, skip already downloaded files, symlinking, make a download cancellable by context.
* on windows if packer is running without symlinking rights and we are getting a local file, the file will be copied instead to avoid errors.
* added unit tests for step_download that are now CI tested on windows, mac & linux.
* files are now downloaded under cache dir `sha1(filename + "?checksum=" + checksum) + file_extension`
* since the output dir is based on the source url and the checksum, when the checksum fails, the file is auto deleted.
* a download file is protected and locked by a file lock,
* updated docs
* updated go modules and vendors
2019-03-13 07:11:58 -04:00
|
|
|
return warnings, errs
|
2016-02-08 09:51:43 -05:00
|
|
|
}
|