HCL2 Parse packer.required_plugins block + packer init (#10304)

This adds the new `required_plugins` block to be nested under the packer block.

Example:
```hcl
packer {
  required_plugins {
    aws = {
      version = ">= 2.7.0"
      source = "azr/aws"
    }
    azure = ">= 2.7.0"
  }
}
```

For example, on darwin_amd64 Packer will install those under:
* "${PACKER_HOME_DIR}/plugin/github.com/azr/amazon/packer-plugin-amazon_2.7.0_x5.0_darwin_amd64"
* "${PACKER_HOME_DIR}/plugin/github.com/hashicorp/azure/packer-plugin-azure_2.7.0_x5.0_darwin_amd64_x5"

+ docs
+ tests
This commit is contained in:
Adrien Delorme 2021-02-02 18:05:04 +01:00 committed by GitHub
parent ac014fc1c3
commit ed091163be
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
416 changed files with 83357 additions and 6718 deletions

View File

@ -121,14 +121,14 @@ type Config struct {
// provisioners should connect to the local IP address of the instance.
UseLocalIPAddress bool `mapstructure:"use_local_ip_address" required:"false"`
// User data to launch with the instance. This is a
// template engine; see "User Data" bellow for
// template engine; see "User Data" below for
// more details. Packer will not automatically wait for a user script to
// finish before shutting down the instance this must be handled in a
// provisioner.
UserData string `mapstructure:"user_data" required:"false"`
// Path to a file that will be used for the user
// data when launching the instance. This file will be parsed as a template
// engine see User Data bellow for more
// engine see User Data below for more
// details.
UserDataFile string `mapstructure:"user_data_file" required:"false"`
// The name or ID of the zone where the instance will be

View File

@ -12,7 +12,7 @@ import (
"strings"
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer/packer/plugin"
"github.com/hashicorp/packer/packer"
)
const packerPluginsCheck = "packer-plugins-check"
@ -119,7 +119,7 @@ func checkPluginName(name string) error {
// in the plugin configuration. At least one builder, provisioner, or post-processor should be found to validate the plugin's
// compatibility with Packer.
func discoverAndLoad() error {
config := plugin.Config{
config := packer.PluginConfig{
PluginMinPort: 10000,
PluginMaxPort: 25000,
}
@ -129,20 +129,18 @@ func discoverAndLoad() error {
}
// TODO: validate correctness of plugins loaded by checking them against the output of the `describe` command.
plugins := config.GetPlugins()
if len(plugins.Builders) == 0 &&
len(plugins.Provisioners) == 0 &&
len(plugins.PostProcessors) == 0 &&
len(plugins.DataSources) == 0 {
return fmt.Errorf("couldn't load any Builder/Provisioner/Post-Processor/Datasource from the plugin binary")
if len(config.Builders.List()) == 0 &&
len(config.Provisioners.List()) == 0 &&
len(config.PostProcessors.List()) == 0 {
return fmt.Errorf("couldn't load any Builder/Provisioner/Post-Processor from the plugin binary")
}
return checkHCL2ConfigSpec(plugins)
return checkHCL2ConfigSpec(config)
}
// checkHCL2ConfigSpec checks if the hcl2spec config is present for the given plugins by validating that ConfigSpec() does not
// return an empty map of specs.
func checkHCL2ConfigSpec(plugins plugin.Plugins) error {
func checkHCL2ConfigSpec(plugins packer.PluginConfig) error {
var errs *packersdk.MultiError
for _, b := range plugins.Builders.List() {
builder, err := plugins.Builders.Start(b)

View File

@ -67,10 +67,7 @@ func (m *Meta) GetConfigFromHCL(cla *MetaArgs) (*hcl2template.PackerConfig, int)
CorePackerVersion: version.SemVer,
CorePackerVersionString: version.FormattedVersion(),
Parser: hclparse.NewParser(),
BuilderSchemas: m.CoreConfig.Components.BuilderStore,
ProvisionersSchemas: m.CoreConfig.Components.ProvisionerStore,
PostProcessorsSchemas: m.CoreConfig.Components.PostProcessorStore,
DatasourceSchemas: m.CoreConfig.Components.DatasourceStore,
PluginConfig: m.CoreConfig.Components.PluginConfig,
}
cfg, diags := parser.Parse(cla.Path, cla.VarFiles, cla.Vars)
return cfg, writeDiags(m.Ui, parser.Files(), diags)

View File

@ -68,13 +68,15 @@ func testMetaParallel(t *testing.T, builder *ParallelTestBuilder, locked *Locked
return Meta{
CoreConfig: &packer.CoreConfig{
Components: packer.ComponentFinder{
BuilderStore: packer.MapOfBuilder{
"parallel-test": func() (packersdk.Builder, error) { return builder, nil },
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"lock": func() (packersdk.Builder, error) { return locked, nil },
},
ProvisionerStore: packer.MapOfProvisioner{
"sleep": func() (packersdk.Provisioner, error) { return &sleep.Provisioner{}, nil },
PluginConfig: &packer.PluginConfig{
Builders: packer.MapOfBuilder{
"parallel-test": func() (packersdk.Builder, error) { return builder, nil },
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"lock": func() (packersdk.Builder, error) { return locked, nil },
},
Provisioners: packer.MapOfProvisioner{
"sleep": func() (packersdk.Provisioner, error) { return &sleep.Provisioner{}, nil },
},
},
},
},

View File

@ -847,21 +847,23 @@ func fileExists(filename string) bool {
// available. This allows us to test a builder that writes files to disk.
func testCoreConfigBuilder(t *testing.T) *packer.CoreConfig {
components := packer.ComponentFinder{
BuilderStore: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
},
ProvisionerStore: packer.MapOfProvisioner{
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
"shell": func() (packersdk.Provisioner, error) { return &shell.Provisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &filep.Provisioner{}, nil },
},
PostProcessorStore: packer.MapOfPostProcessor{
"shell-local": func() (packersdk.PostProcessor, error) { return &shell_local_pp.PostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &manifest.PostProcessor{}, nil },
},
DatasourceStore: packersdk.MapOfDatasource{
"mock": func() (packersdk.Datasource, error) { return &packersdk.MockDatasource{}, nil },
PluginConfig: &packer.PluginConfig{
Builders: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
},
Provisioners: packer.MapOfProvisioner{
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
"shell": func() (packersdk.Provisioner, error) { return &shell.Provisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &filep.Provisioner{}, nil },
},
PostProcessors: packer.MapOfPostProcessor{
"shell-local": func() (packersdk.PostProcessor, error) { return &shell_local_pp.PostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &manifest.PostProcessor{}, nil },
},
DataSources: packer.MapOfDatasource{
"mock": func() (packersdk.Datasource, error) { return &packersdk.MockDatasource{}, nil },
},
},
}
return &packer.CoreConfig{

View File

@ -16,12 +16,14 @@ import (
// available. This allows us to test a builder that writes files to disk.
func testCoreConfigSleepBuilder(t *testing.T) *packer.CoreConfig {
components := packer.ComponentFinder{
BuilderStore: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
},
ProvisionerStore: packer.MapOfProvisioner{
"sleep": func() (packersdk.Provisioner, error) { return &sleep.Provisioner{}, nil },
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
PluginConfig: &packer.PluginConfig{
Builders: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
},
Provisioners: packer.MapOfProvisioner{
"sleep": func() (packersdk.Provisioner, error) { return &sleep.Provisioner{}, nil },
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
},
},
}
return &packer.CoreConfig{

View File

@ -93,6 +93,18 @@ type BuildArgs struct {
OnError string
}
// AddFlagSets registers the flags specific to `packer init` (currently only
// -upgrade) and then the flags shared by every command via MetaArgs.
func (ia *InitArgs) AddFlagSets(flags *flag.FlagSet) {
	flags.BoolVar(&ia.Upgrade, "upgrade", false, "upgrade any present plugin to the highest allowed version.")
	ia.MetaArgs.AddFlagSets(flags)
}
// InitArgs represents a parsed cli line for a `packer init`
type InitArgs struct {
	MetaArgs
	// Upgrade, when true, also upgrades already-installed plugins to the
	// highest version allowed by the config's version constraints.
	Upgrade bool
}
// ConsoleArgs represents a parsed cli line for a `packer console`
type ConsoleArgs struct {
MetaArgs

View File

@ -3,6 +3,7 @@ package command
import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/packer/packer"
plugingetter "github.com/hashicorp/packer/packer/plugin-getter"
)
// CoreWrapper wraps a packer.Core in order to have its Initialize func return
@ -23,3 +24,13 @@ func (c *CoreWrapper) Initialize(_ packer.InitializeOptions) hcl.Diagnostics {
}
return nil
}
// PluginRequirements is the JSON-template implementation of the plugin
// requirements hook: `packer init` only supports HCL2 configurations, so
// this always returns no requirements plus an error diagnostic telling the
// user to install plugins manually or switch to HCL2.
func (c *CoreWrapper) PluginRequirements() (plugingetter.Requirements, hcl.Diagnostics) {
	diag := &hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Packer init is supported for HCL2 configuration templates only",
		Detail:   "Please manually install plugins or use a HCL2 configuration that will do that for you.",
	}
	return nil, hcl.Diagnostics{diag}
}

View File

@ -118,19 +118,21 @@ func commandMeta() Meta {
func getBareComponentFinder() packer.ComponentFinder {
return packer.ComponentFinder{
BuilderStore: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
"amazon-ebs": func() (packersdk.Builder, error) { return &ebs.Builder{}, nil },
},
ProvisionerStore: packer.MapOfProvisioner{
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
"shell": func() (packersdk.Provisioner, error) { return &shell.Provisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &filep.Provisioner{}, nil },
},
PostProcessorStore: packer.MapOfPostProcessor{
"shell-local": func() (packersdk.PostProcessor, error) { return &shell_local_pp.PostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &manifest.PostProcessor{}, nil },
PluginConfig: &packer.PluginConfig{
Builders: packer.MapOfBuilder{
"file": func() (packersdk.Builder, error) { return &file.Builder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
"amazon-ebs": func() (packersdk.Builder, error) { return &ebs.Builder{}, nil },
},
Provisioners: packer.MapOfProvisioner{
"shell-local": func() (packersdk.Provisioner, error) { return &shell_local.Provisioner{}, nil },
"shell": func() (packersdk.Provisioner, error) { return &shell.Provisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &filep.Provisioner{}, nil },
},
PostProcessors: packer.MapOfPostProcessor{
"shell-local": func() (packersdk.PostProcessor, error) { return &shell_local_pp.PostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &manifest.PostProcessor{}, nil },
},
},
}
}

View File

@ -202,7 +202,7 @@ func (c *HCL2UpgradeCommand) RunContext(buildCtx context.Context, cla *HCL2Upgra
body := sourcesContent.Body()
body.AppendNewline()
if !c.Meta.CoreConfig.Components.BuilderStore.Has(builderCfg.Type) {
if !c.Meta.CoreConfig.Components.PluginConfig.Builders.Has(builderCfg.Type) {
c.Ui.Error(fmt.Sprintf("unknown builder type: %q\n", builderCfg.Type))
return 1
}

164
command/init.go Normal file
View File

@ -0,0 +1,164 @@
package command
import (
"context"
"crypto/sha256"
"fmt"
"log"
"runtime"
"strings"
pluginsdk "github.com/hashicorp/packer-plugin-sdk/plugin"
plugingetter "github.com/hashicorp/packer/packer/plugin-getter"
"github.com/hashicorp/packer/packer/plugin-getter/github"
"github.com/hashicorp/packer/version"
"github.com/posener/complete"
)
// InitCommand is the `packer init` CLI command: it reads the plugin
// requirements of a config and installs any missing plugin binaries.
type InitCommand struct {
	Meta
}
// Run parses the command line and delegates to RunContext with a context
// that is cancelled on termination signals. Returns a process exit code.
func (c *InitCommand) Run(args []string) int {
	ctx, cleanup := handleTermInterrupt(c.Ui)
	defer cleanup()

	cfg, exitCode := c.ParseArgs(args)
	if exitCode == 0 {
		exitCode = c.RunContext(ctx, cfg)
	}
	return exitCode
}
// ParseArgs validates the command line for `packer init` and returns the
// parsed arguments plus an exit code: 0 on success, 1 on a usage error.
func (c *InitCommand) ParseArgs(args []string) (*InitArgs, int) {
	cfg := &InitArgs{}

	flagSet := c.Meta.FlagSet("init", 0)
	flagSet.Usage = func() { c.Ui.Say(c.Help()) }
	cfg.AddFlagSets(flagSet)
	if err := flagSet.Parse(args); err != nil {
		return cfg, 1
	}

	// Exactly one positional argument — the config file or folder — is
	// required.
	rest := flagSet.Args()
	if len(rest) != 1 {
		flagSet.Usage()
		return cfg, 1
	}
	cfg.Path = rest[0]
	return cfg, 0
}
// RunContext installs the plugins required by the config that cla points
// to. For each plugin requirement it lists matching installations in the
// known plugin folders; when none is found — or when -upgrade was passed —
// the latest allowed version is downloaded and installed. Returns a
// process exit code: 0 only when every step succeeded.
func (c *InitCommand) RunContext(buildCtx context.Context, cla *InitArgs) int {
	packerStarter, ret := c.GetConfig(&cla.MetaArgs)
	if ret != 0 {
		return ret
	}

	// Get plugins requirements; only HCL2 configs can declare them, JSON
	// configs return an error diagnostic here.
	reqs, diags := packerStarter.PluginRequirements()
	ret = writeDiags(c.Ui, nil, diags)
	if ret != 0 {
		return ret
	}

	opts := plugingetter.ListInstallationsOptions{
		FromFolders: c.Meta.CoreConfig.Components.PluginConfig.KnownPluginFolders,
		BinaryInstallationOptions: plugingetter.BinaryInstallationOptions{
			OS:              runtime.GOOS,
			ARCH:            runtime.GOARCH,
			APIVersionMajor: pluginsdk.APIVersionMajor,
			APIVersionMinor: pluginsdk.APIVersionMinor,
			Checksummers: []plugingetter.Checksummer{
				{Type: "sha256", Hash: sha256.New()},
			},
		},
	}
	if runtime.GOOS == "windows" && opts.Ext == "" {
		opts.BinaryInstallationOptions.Ext = ".exe"
	}

	log.Printf("[TRACE] init: %#v", opts)

	getters := []plugingetter.Getter{
		&github.Getter{
			// In the past some terraform plugins downloads were blocked from a
			// specific aws region by s3. Changing the user agent unblocked the
			// downloads so having one user agent per version will help mitigate
			// that a little more. Especially in the case someone forks this
			// code to make it more aggressive or something.
			// TODO: allow to set this from the config file or an environment
			// variable.
			UserAgent: "packer-getter-github-" + version.String(),
		},
	}

	for _, pluginRequirement := range reqs {
		// Get installed plugins that match requirement
		installs, err := pluginRequirement.ListInstallations(opts)
		if err != nil {
			c.Ui.Error(err.Error())
			return 1
		}
		log.Printf("[TRACE] for plugin %s found %d matching installation(s)", pluginRequirement.Identifier.ForDisplay(), len(installs))
		if len(installs) > 0 && !cla.Upgrade {
			// Requirement already satisfied and no upgrade requested.
			continue
		}

		newInstall, err := pluginRequirement.InstallLatest(plugingetter.InstallOptions{
			InFolders:                 opts.FromFolders,
			BinaryInstallationOptions: opts.BinaryInstallationOptions,
			Getters:                   getters,
		})
		if err != nil {
			// Report the failure but keep going so the remaining plugins
			// still get a chance to install; previously the exit code was
			// left at 0 here, making failed installs look successful.
			c.Ui.Error(err.Error())
			ret = 1
		}
		if newInstall != nil {
			msg := fmt.Sprintf("Installed plugin %s %s in %q", pluginRequirement.Identifier.ForDisplay(), newInstall.Version, newInstall.BinaryPath)
			c.Ui.Say(msg)
		}
	}
	return ret
}
// Help returns the long-form usage text shown by `packer init -h`.
func (*InitCommand) Help() string {
	helpText := `
Usage: packer init [options] [config.pkr.hcl|folder/]
Install all the missing plugins required in a Packer config. Note that Packer
does not have a state.
This is the first command that should be executed when working with a new
or existing template.
This command is always safe to run multiple times. Though subsequent runs may
give errors, this command will never delete anything.
Options:
  -upgrade On top of installing missing plugins, update
           installed plugins to the latest available
           version, if there is a new higher one. Note that
           this still takes into consideration the version
           constraint of the config.
`
	return strings.TrimSpace(helpText)
}
// Synopsis returns the one-line description shown in the `packer` command
// listing.
func (*InitCommand) Synopsis() string {
	return "Install missing plugins or upgrade plugins"
}
// AutocompleteArgs returns the predictor for positional arguments; the
// config path is not predicted.
func (*InitCommand) AutocompleteArgs() complete.Predictor {
	return complete.PredictNothing
}
// AutocompleteFlags lists the flags shell autocompletion can offer for
// `packer init`.
func (*InitCommand) AutocompleteFlags() complete.Flags {
	return complete.Flags{
		"-upgrade": complete.PredictNothing,
	}
}

View File

@ -6,6 +6,7 @@ import (
"github.com/google/go-cmp/cmp"
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer/packer"
)
func TestValidateCommand(t *testing.T) {
@ -49,7 +50,7 @@ func TestValidateCommand(t *testing.T) {
func TestValidateCommand_SkipDatasourceExecution(t *testing.T) {
datasourceMock := &packersdk.MockDatasource{}
meta := testMetaFile(t)
meta.CoreConfig.Components.DatasourceStore = packersdk.MapOfDatasource{
meta.CoreConfig.Components.PluginConfig.DataSources = packer.MapOfDatasource{
"mock": func() (packersdk.Datasource, error) {
return datasourceMock, nil
},

View File

@ -44,6 +44,12 @@ func init() {
}, nil
},
"init": func() (cli.Command, error) {
return &command.InitCommand{
Meta: *CommandMeta,
}, nil
},
"inspect": func() (cli.Command, error) {
return &command.InspectCommand{
Meta: *CommandMeta,

View File

@ -14,7 +14,6 @@ import (
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer/command"
"github.com/hashicorp/packer/packer"
"github.com/hashicorp/packer/packer/plugin"
)
// PACKERSPACE is used to represent the spaces that separate args for a command
@ -22,16 +21,13 @@ import (
const PACKERSPACE = "-PACKERSPACE-"
type config struct {
DisableCheckpoint bool `json:"disable_checkpoint"`
DisableCheckpointSignature bool `json:"disable_checkpoint_signature"`
RawBuilders map[string]string `json:"builders"`
RawProvisioners map[string]string `json:"provisioners"`
RawPostProcessors map[string]string `json:"post-processors"`
Builders packer.MapOfBuilder `json:"-"`
Provisioners packer.MapOfProvisioner `json:"-"`
PostProcessors packer.MapOfPostProcessor `json:"-"`
Datasources packer.MapOfDatasource `json:"-"`
Plugins plugin.Config
DisableCheckpoint bool `json:"disable_checkpoint"`
DisableCheckpointSignature bool `json:"disable_checkpoint_signature"`
RawBuilders map[string]string `json:"builders"`
RawProvisioners map[string]string `json:"provisioners"`
RawPostProcessors map[string]string `json:"post-processors"`
Plugins *packer.PluginConfig
}
// decodeConfig decodes configuration in JSON format from the given io.Reader into
@ -97,19 +93,19 @@ func (c *config) loadSingleComponent(path string) (string, error) {
switch {
case strings.HasPrefix(pluginName, "packer-builder-"):
pluginName = pluginName[len("packer-builder-"):]
c.Builders[pluginName] = func() (packersdk.Builder, error) {
c.Plugins.Builders.Set(pluginName, func() (packersdk.Builder, error) {
return c.Plugins.Client(path).Builder()
}
})
case strings.HasPrefix(pluginName, "packer-post-processor-"):
pluginName = pluginName[len("packer-post-processor-"):]
c.PostProcessors[pluginName] = func() (packersdk.PostProcessor, error) {
c.Plugins.PostProcessors.Set(pluginName, func() (packersdk.PostProcessor, error) {
return c.Plugins.Client(path).PostProcessor()
}
})
case strings.HasPrefix(pluginName, "packer-provisioner-"):
pluginName = pluginName[len("packer-provisioner-"):]
c.Provisioners[pluginName] = func() (packersdk.Provisioner, error) {
c.Plugins.Provisioners.Set(pluginName, func() (packersdk.Provisioner, error) {
return c.Plugins.Client(path).Provisioner()
}
})
}
return pluginName, nil
@ -119,7 +115,7 @@ func (c *config) loadSingleComponent(path string) (string, error) {
// implementations from the defined plugins.
func (c *config) StartBuilder(name string) (packersdk.Builder, error) {
log.Printf("Loading builder: %s\n", name)
return c.Builders.Start(name)
return c.Plugins.Builders.Start(name)
}
// This is a proper implementation of packer.HookFunc that can be used
@ -133,14 +129,14 @@ func (c *config) StarHook(name string) (packersdk.Hook, error) {
// packersdk.PostProcessor implementations from defined plugins.
func (c *config) StartPostProcessor(name string) (packersdk.PostProcessor, error) {
log.Printf("Loading post-processor: %s", name)
return c.PostProcessors.Start(name)
return c.Plugins.PostProcessors.Start(name)
}
// This is a proper packer.ProvisionerFunc that can be used to load
// packer.Provisioner implementations from defined plugins.
func (c *config) StartProvisioner(name string) (packersdk.Provisioner, error) {
log.Printf("Loading provisioner: %s\n", name)
return c.Provisioners.Start(name)
return c.Plugins.Provisioners.Start(name)
}
func (c *config) discoverInternalComponents() error {
@ -153,49 +149,45 @@ func (c *config) discoverInternalComponents() error {
for builder := range command.Builders {
builder := builder
_, found := (c.Builders)[builder]
if !found {
c.Builders[builder] = func() (packersdk.Builder, error) {
bin := fmt.Sprintf("%s%splugin%spacker-builder-%s",
packerPath, PACKERSPACE, PACKERSPACE, builder)
if !c.Plugins.Builders.Has(builder) {
bin := fmt.Sprintf("%s%splugin%spacker-builder-%s",
packerPath, PACKERSPACE, PACKERSPACE, builder)
c.Plugins.Builders.Set(builder, func() (packersdk.Builder, error) {
return c.Plugins.Client(bin).Builder()
}
})
}
}
for provisioner := range command.Provisioners {
provisioner := provisioner
_, found := (c.Provisioners)[provisioner]
if !found {
c.Provisioners[provisioner] = func() (packersdk.Provisioner, error) {
bin := fmt.Sprintf("%s%splugin%spacker-provisioner-%s",
packerPath, PACKERSPACE, PACKERSPACE, provisioner)
if !c.Plugins.Provisioners.Has(provisioner) {
bin := fmt.Sprintf("%s%splugin%spacker-provisioner-%s",
packerPath, PACKERSPACE, PACKERSPACE, provisioner)
c.Plugins.Provisioners.Set(provisioner, func() (packersdk.Provisioner, error) {
return c.Plugins.Client(bin).Provisioner()
}
})
}
}
for postProcessor := range command.PostProcessors {
postProcessor := postProcessor
_, found := (c.PostProcessors)[postProcessor]
if !found {
c.PostProcessors[postProcessor] = func() (packersdk.PostProcessor, error) {
bin := fmt.Sprintf("%s%splugin%spacker-post-processor-%s",
packerPath, PACKERSPACE, PACKERSPACE, postProcessor)
if !c.Plugins.PostProcessors.Has(postProcessor) {
bin := fmt.Sprintf("%s%splugin%spacker-post-processor-%s",
packerPath, PACKERSPACE, PACKERSPACE, postProcessor)
c.Plugins.PostProcessors.Set(postProcessor, func() (packersdk.PostProcessor, error) {
return c.Plugins.Client(bin).PostProcessor()
}
})
}
}
for dataSource := range command.Datasources {
dataSource := dataSource
_, found := (c.Datasources)[dataSource]
if !found {
c.Datasources[dataSource] = func() (packersdk.Datasource, error) {
bin := fmt.Sprintf("%s%splugin%spacker-datasource-%s",
packerPath, PACKERSPACE, PACKERSPACE, dataSource)
if !c.Plugins.DataSources.Has(dataSource) {
bin := fmt.Sprintf("%s%splugin%spacker-datasource-%s",
packerPath, PACKERSPACE, PACKERSPACE, dataSource)
c.Plugins.DataSources.Set(dataSource, func() (packersdk.Datasource, error) {
return c.Plugins.Client(bin).Datasource()
}
})
}
}

View File

@ -48,10 +48,13 @@ func TestLoadExternalComponentsFromConfig(t *testing.T) {
}
defer cleanUpFunc()
var cfg config
cfg.Builders = packer.MapOfBuilder{}
cfg.PostProcessors = packer.MapOfPostProcessor{}
cfg.Provisioners = packer.MapOfProvisioner{}
cfg := config{
Plugins: &packer.PluginConfig{
Builders: packer.MapOfBuilder{},
PostProcessors: packer.MapOfPostProcessor{},
Provisioners: packer.MapOfProvisioner{},
},
}
if err := decodeConfig(strings.NewReader(packerConfigData), &cfg); err != nil {
t.Fatalf("error encountered decoding configuration: %v", err)
@ -59,16 +62,16 @@ func TestLoadExternalComponentsFromConfig(t *testing.T) {
cfg.LoadExternalComponentsFromConfig()
if len(cfg.Builders) != 1 || !cfg.Builders.Has("cloud-xyz") {
t.Errorf("failed to load external builders; got %v as the resulting config", cfg.Builders)
if len(cfg.Plugins.Builders.List()) != 1 || !cfg.Plugins.Builders.Has("cloud-xyz") {
t.Errorf("failed to load external builders; got %v as the resulting config", cfg.Plugins.Builders)
}
if len(cfg.PostProcessors) != 1 || !cfg.PostProcessors.Has("noop") {
t.Errorf("failed to load external post-processors; got %v as the resulting config", cfg.PostProcessors)
if len(cfg.Plugins.PostProcessors.List()) != 1 || !cfg.Plugins.PostProcessors.Has("noop") {
t.Errorf("failed to load external post-processors; got %v as the resulting config", cfg.Plugins.PostProcessors)
}
if len(cfg.Provisioners) != 1 || !cfg.Provisioners.Has("super-shell") {
t.Errorf("failed to load external provisioners; got %v as the resulting config", cfg.Provisioners)
if len(cfg.Plugins.Provisioners.List()) != 1 || !cfg.Plugins.Provisioners.Has("super-shell") {
t.Errorf("failed to load external provisioners; got %v as the resulting config", cfg.Plugins.Provisioners)
}
}
@ -80,8 +83,13 @@ func TestLoadExternalComponentsFromConfig_onlyProvisioner(t *testing.T) {
}
defer cleanUpFunc()
var cfg config
cfg.Provisioners = packer.MapOfProvisioner{}
cfg := config{
Plugins: &packer.PluginConfig{
Builders: packer.MapOfBuilder{},
PostProcessors: packer.MapOfPostProcessor{},
Provisioners: packer.MapOfProvisioner{},
},
}
if err := decodeConfig(strings.NewReader(packerConfigData), &cfg); err != nil {
t.Fatalf("error encountered decoding configuration: %v", err)
@ -95,16 +103,16 @@ func TestLoadExternalComponentsFromConfig_onlyProvisioner(t *testing.T) {
cfg.LoadExternalComponentsFromConfig()
if len(cfg.Builders) != 0 {
t.Errorf("loaded external builders when it wasn't supposed to; got %v as the resulting config", cfg.Builders)
if len(cfg.Plugins.Builders.List()) != 0 {
t.Errorf("loaded external builders when it wasn't supposed to; got %v as the resulting config", cfg.Plugins.Builders)
}
if len(cfg.PostProcessors) != 0 {
t.Errorf("loaded external post-processors when it wasn't supposed to; got %v as the resulting config", cfg.PostProcessors)
if len(cfg.Plugins.PostProcessors.List()) != 0 {
t.Errorf("loaded external post-processors when it wasn't supposed to; got %v as the resulting config", cfg.Plugins.PostProcessors)
}
if len(cfg.Provisioners) != 1 || !cfg.Provisioners.Has("super-shell") {
t.Errorf("failed to load external provisioners; got %v as the resulting config", cfg.Provisioners)
if len(cfg.Plugins.Provisioners.List()) != 1 || !cfg.Plugins.Provisioners.Has("super-shell") {
t.Errorf("failed to load external provisioners; got %v as the resulting config", cfg.Plugins.Provisioners)
}
}
@ -125,10 +133,13 @@ func TestLoadSingleComponent(t *testing.T) {
{pluginPath: "./non-existing-file", errorExpected: true},
}
var cfg config
cfg.Builders = packer.MapOfBuilder{}
cfg.PostProcessors = packer.MapOfPostProcessor{}
cfg.Provisioners = packer.MapOfProvisioner{}
cfg := config{
Plugins: &packer.PluginConfig{
Builders: packer.MapOfBuilder{},
PostProcessors: packer.MapOfPostProcessor{},
Provisioners: packer.MapOfProvisioner{},
},
}
for _, tc := range tt {
tc := tc

View File

@ -1,7 +1,39 @@
source "null" "example" {
source "amazon-ebs" "example" {
communicator = "none"
source_ami = "potato"
ami_name = "potato"
instance_type = "potato"
}
build {
sources = ["source.null.example"]
name = "my-provisioners-are-cooler"
sources = ["source.amazon-ebs.example"]
provisioner "comment-that-works" {
}
}
packer {
required_plugins {
comment = {
source = "sylviamoss/comment"
version = "v0.2.15"
}
comment-that-works = {
source = "sylviamoss/comment"
version = "v0.2.19"
}
}
}
build {
sources = ["source.amazon-ebs.example"]
provisioner "comment-my-provisioner" {
}
provisioner "shell-local" {
inline = ["yo"]
}
}

8
go.mod
View File

@ -23,7 +23,7 @@ require (
github.com/cheggaaa/pb v1.0.27
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/digitalocean/go-qemu v0.0.0-20181112162955-dd7bb9c771b8
github.com/digitalocean/go-qemu v0.0.0-20201211181942-d361e7b4965f
github.com/digitalocean/godo v1.11.1
github.com/exoscale/egoscale v0.18.1
github.com/fatih/camelcase v1.0.0
@ -33,6 +33,7 @@ require (
github.com/gobwas/glob v0.2.3
github.com/golang-collections/collections v0.0.0-20130729185459-604e922904d3
github.com/google/go-cmp v0.5.2
github.com/google/go-github/v33 v33.0.1-0.20210113204525-9318e629ec69
github.com/google/uuid v1.1.2
github.com/gophercloud/gophercloud v0.12.0
github.com/gophercloud/utils v0.0.0-20200508015959-b0167b94122c
@ -49,7 +50,7 @@ require (
github.com/hashicorp/go-uuid v1.0.2
github.com/hashicorp/go-version v1.2.0
github.com/hashicorp/hcl/v2 v2.8.0
github.com/hashicorp/packer-plugin-sdk v0.0.9
github.com/hashicorp/packer-plugin-sdk v0.0.11-0.20210127164048-448d64e93ee6
github.com/hashicorp/vault/api v1.0.4
github.com/hetznercloud/hcloud-go v1.15.1
github.com/hyperonecom/h1-client-go v0.0.0-20191203060043-b46280e4c4a4
@ -75,6 +76,7 @@ require (
github.com/profitbricks/profitbricks-sdk-go v4.0.2+incompatible
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.7
github.com/shirou/gopsutil v2.18.12+incompatible
github.com/smartystreets/goconvey v1.6.4 // indirect
github.com/stretchr/testify v1.6.1
github.com/tencentcloud/tencentcloud-sdk-go v3.0.222+incompatible
github.com/ucloud/ucloud-sdk-go v0.16.3
@ -97,4 +99,6 @@ require (
google.golang.org/grpc v1.32.0
)
// replace github.com/hashicorp/packer-plugin-sdk => /Users/azr/go/src/github.com/hashicorp/packer-plugin-sdk
go 1.13

100
go.sum
View File

@ -38,6 +38,8 @@ github.com/1and1/oneandone-cloudserver-sdk-go v1.0.1 h1:RMTyvS5bjvSWiUcfqfr/E2px
github.com/1and1/oneandone-cloudserver-sdk-go v1.0.1/go.mod h1:61apmbkVJH4kg+38ftT+/l0XxdUCVnHggqcOTqZRSEE=
github.com/Azure/azure-sdk-for-go v40.5.0+incompatible h1:CVQNKuUepSFBo6BW6gM1J9slPHLRcjn6vaw+j+causw=
github.com/Azure/azure-sdk-for-go v40.5.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0=
github.com/Azure/go-autorest/autorest v0.10.0 h1:mvdtztBqcL8se7MdrUweNieTNi4kfNG6GOJuurQJpuY=
@ -62,10 +64,13 @@ github.com/Azure/go-autorest/autorest/to v0.3.0 h1:zebkZaadz7+wIQYgC7GXaz3Wb28yK
github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/validation v0.2.0 h1:15vMO4y76dehZSq7pAaOLQxC6dZYsSrj2GQpflyM/L4=
github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI=
github.com/Azure/go-autorest/autorest/validation v0.3.1 h1:AgyqjAd94fwNAoTjl/WQXg4VvFeRFpO+UhNyRXqF1ac=
github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E=
github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4 h1:pSm8mp0T2OH2CPmPDPtwHPr3VAQaOwVF/JbllOPP4xA=
github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/Azure/go-ntlmssp v0.0.0-20191115201650-bad6df29494a h1:3FwiePtHk5YJrooV799oo5jIfsgRdES25VdngJM03dU=
github.com/Azure/go-ntlmssp v0.0.0-20191115201650-bad6df29494a/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
@ -90,8 +95,12 @@ github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20170113022742-e6dbea820a9f h1:jI4DIE
github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20170113022742-e6dbea820a9f/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
github.com/antchfx/xpath v0.0.0-20170728053731-b5c552e1acbd h1:S3Fr6QnkpW9VRjiEY4psQHhhbbahASuNVj52YIce7lI=
github.com/antchfx/xpath v0.0.0-20170728053731-b5c552e1acbd/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
github.com/antchfx/xpath v1.1.11 h1:WOFtK8TVAjLm3lbgqeP0arlHpvCEeTANeWZ/csPpJkQ=
github.com/antchfx/xpath v1.1.11/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antchfx/xquery v0.0.0-20170730121040-eb8c3c172607 h1:BFFG6KP8ASFBg2ptWsJn8p8RDufBjBDKIxLU7BTYGOM=
github.com/antchfx/xquery v0.0.0-20170730121040-eb8c3c172607/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M=
github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0 h1:JaCC8jz0zdMLk2m+qCCVLLLM/PL93p84w4pK3aJWj60=
github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M=
github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apparentlymart/go-cidr v1.0.1 h1:NmIwLZ/KdsjIUlhf+/Np40atNXm/+lZ5txfTJ/SpF+U=
@ -105,9 +114,11 @@ github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJE
github.com/approvals/go-approval-tests v0.0.0-20160714161514-ad96e53bea43 h1:ePCAQPf5tUc5IMcUvu6euhSGna7jzs7eiXtJXHig6Zc=
github.com/approvals/go-approval-tests v0.0.0-20160714161514-ad96e53bea43/go.mod h1:S6puKjZ9ZeqUPBv2hEBnMZGcM2J6mOsDRQcmxkMAND0=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878 h1:EFSB7Zo9Eg91v7MJPVsifUysc/wPdN+NOnVe6bWbdBM=
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310 h1:BUAU3CGlLvorLI26FmByPp2eC2qla6E1Tw+scpcg/to=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
@ -150,8 +161,16 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumC
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/digitalocean/go-libvirt v0.0.0-20190626172931-4d226dd6c437 h1:phR13shVFOIpa1pnLBmewI9p16NEladLPvVylLPeexo=
github.com/digitalocean/go-libvirt v0.0.0-20190626172931-4d226dd6c437/go.mod h1:PRcPVAAma6zcLpFd4GZrjR/MRpood3TamjKI2m/z/Uw=
github.com/digitalocean/go-libvirt v0.0.0-20201209184759-e2a69bcd5bd1 h1:j6vGflaQ2T7yOWqVgPdiRF73j/U2Zmpbbzab8nyDCRQ=
github.com/digitalocean/go-libvirt v0.0.0-20201209184759-e2a69bcd5bd1/go.mod h1:QS1XzqZLcDniNYrN7EZefq3wIyb/M2WmJbql4ZKoc1Q=
github.com/digitalocean/go-libvirt v0.0.0-20210108193637-3a8ae49ba8cd h1:+96Lbk3f8glkOcsRdy3Nubga8pE40kor2OgxDzVGNZM=
github.com/digitalocean/go-libvirt v0.0.0-20210108193637-3a8ae49ba8cd/go.mod h1:gtar3MgGsIO64GgphCHw1cbyxSI6qEuTIm9+izMmlfk=
github.com/digitalocean/go-libvirt v0.0.0-20210112203132-25518eb2c840 h1:F3RVNV8SLLNhkNFcbDTgD3wAPMcrMJW6xjjI0JXy9z8=
github.com/digitalocean/go-libvirt v0.0.0-20210112203132-25518eb2c840/go.mod h1:gtar3MgGsIO64GgphCHw1cbyxSI6qEuTIm9+izMmlfk=
github.com/digitalocean/go-qemu v0.0.0-20181112162955-dd7bb9c771b8 h1:N7nH2py78LcMqYY3rZjjrsX6N7uCN7sjvaosgpXN9Ow=
github.com/digitalocean/go-qemu v0.0.0-20181112162955-dd7bb9c771b8/go.mod h1:/YnlngP1PARC0SKAZx6kaAEMOp8bNTQGqS+Ka3MctNI=
github.com/digitalocean/go-qemu v0.0.0-20201211181942-d361e7b4965f h1:BYkBJhHxUJJn27mhqfqWycWaEOWv9JQqLgQ2pOFJMqE=
github.com/digitalocean/go-qemu v0.0.0-20201211181942-d361e7b4965f/go.mod h1:y4Eq3ZfZQFWQwVyW0qvgo5seXUIq2C7BlHsdE+xtXL4=
github.com/digitalocean/godo v1.11.1 h1:OsTh37YFKk+g6DnAOrkXJ9oDArTkRx5UTkBJ2EWAO38=
github.com/digitalocean/godo v1.11.1/go.mod h1:h6faOIcZ8lWIwNQ+DN7b3CgX4Kwby5T+nbpNqkUIozU=
github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=
@ -170,6 +189,7 @@ github.com/exoscale/egoscale v0.18.1 h1:1FNZVk8jHUx0AvWhOZxLEDNlacTU0chMXUUNkm9E
github.com/exoscale/egoscale v0.18.1/go.mod h1:Z7OOdzzTOz1Q1PjQXumlz9Wn/CddH0zSYdCF3rnBKXE=
github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=
github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
@ -186,6 +206,8 @@ github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3I
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
github.com/go-ole/go-ole v1.2.4 h1:nNBDSCOigTSiarFpYE9J/KtEA1IOW4CNeqT9TQDqCxI=
github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=
github.com/go-ole/go-ole v1.2.5 h1:t4MGB5xEDZvXI+0rMjjsfBsD7yAgp/s9ZDkL1JndXwY=
github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-resty/resty/v2 v2.1.1-0.20191201195748-d7b97669fe48/go.mod h1:dZGr0i9PLlaaTD4H/hoZIDjQ+r6xq8mgbRzHZf7f2J8=
github.com/go-resty/resty/v2 v2.3.0 h1:JOOeAvjSlapTT92p8xiS19Zxev1neGikoHsXJeOq8So=
github.com/go-resty/resty/v2 v2.3.0/go.mod h1:UpN9CgLZNsv4e9XG50UU8xdI0F43UQ4HmxLBDwaroHU=
@ -243,8 +265,12 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-github/v33 v33.0.0/go.mod h1:GMdDnVZY/2TsWgp/lkYnpSAh6TrzhANBBwm6k6TTEXg=
github.com/google/go-github/v33 v33.0.1-0.20210113204525-9318e629ec69 h1:zL0/Ug5CMhV0XRb3A6vnK1SQ9kJM3VIyRxPQ5t9w8Bg=
github.com/google/go-github/v33 v33.0.1-0.20210113204525-9318e629ec69/go.mod h1:GMdDnVZY/2TsWgp/lkYnpSAh6TrzhANBBwm6k6TTEXg=
github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0 h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=
@ -272,6 +298,8 @@ github.com/gophercloud/gophercloud v0.12.0 h1:mZrie07npp6ODiwHZolTicr5jV8Ogn43Av
github.com/gophercloud/gophercloud v0.12.0/go.mod h1:gmC5oQqMDOMO1t1gq5DquX/yAU808e/4mzjjDA76+Ss=
github.com/gophercloud/utils v0.0.0-20200508015959-b0167b94122c h1:iawx2ojEQA7c+GmkaVO5sN+k8YONibXyDO8RlsC+1bs=
github.com/gophercloud/utils v0.0.0-20200508015959-b0167b94122c/go.mod h1:ehWUbLQJPqS0Ep+CxeD559hsm9pthPXadJNKwZkp43w=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e h1:JKmoR8x90Iww1ks85zJ1lfDGgIiMDuIptTOhJq+zKyg=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/grpc-ecosystem/go-grpc-middleware v1.1.0 h1:THDBEeQ9xZ8JEaCLyLQqXMMdRqNr0QAUJTIkQAUtFjg=
@ -306,9 +334,11 @@ github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9
github.com/hashicorp/go-hclog v0.8.0/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v0.12.0 h1:d4QkX8FRTYaKaCZBoXYY8zJX2BXjWxurN/GA2tkrmZM=
github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-immutable-radix v1.1.0 h1:vN9wG1D6KG6YHRTWr8512cxGOVgTMEfgEdSj/hr8MPc=
github.com/hashicorp/go-immutable-radix v1.1.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-msgpack v0.5.5 h1:i9R9JSrqIz0QVLz3sz+i3YJdT7TTSLcfLLzJi9aZTuI=
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
@ -321,6 +351,7 @@ github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-retryablehttp v0.5.4 h1:1BZvpawXoJCWX6pNtow9+rpEj+3itIlutiqnntI6jOE=
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-rootcerts v1.0.1 h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
@ -340,6 +371,7 @@ github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+d
github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.3 h1:YPkqC67at8FYaadspW/6uE0COsBxS2656RLEr8Bppgk=
github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
@ -351,24 +383,31 @@ github.com/hashicorp/hcl/v2 v2.8.0/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yI
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY=
github.com/hashicorp/memberlist v0.1.3 h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/memberlist v0.2.2 h1:5+RffWKwqJ71YPu9mWsF7ZOscZmwfasdA8kbdC7AO2g=
github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
github.com/hashicorp/packer v1.6.6/go.mod h1:meJucaEeJro8UP1jw+KaOCpaiC4VE+itPLXY8lbIU2A=
github.com/hashicorp/packer v1.6.7-0.20210107234516-6564ee76e807/go.mod h1:fBz288Z4of8zkpDWwL/ngG1txC36jGSXS7dnUmUaLUs=
github.com/hashicorp/packer v1.6.7-0.20210112155033-1bc89e34714f/go.mod h1:sKsuxExP+0/WCnIwwCRFFEHKy9TuQrZcwqbidE2oSq8=
github.com/hashicorp/packer v1.6.7-0.20210120105538-140685fb4f8b/go.mod h1:Yaw0q8brThFN2PKMWMRcuhQBuwPVDJRHHioNuMJTS7Q=
github.com/hashicorp/packer v1.6.7-0.20210125170305-539638b0f951/go.mod h1:Z3eunaxVQ3XgQ+rW7TEH0T/PRQzCUSyCBUTkm/VL7io=
github.com/hashicorp/packer v1.6.7-0.20210126105722-aef4ced967ec/go.mod h1:2+Vo/c/fA+TD9yFc/h9jQMFm4yG+IymQIr0OdJJOPiE=
github.com/hashicorp/packer-plugin-sdk v0.0.6 h1:BN2G4APXSMvDURFdnk+6DspwsU83pZeMsbEur7NmGsA=
github.com/hashicorp/packer-plugin-sdk v0.0.6/go.mod h1:Nvh28f+Jmpp2rcaN79bULTouNkGNDRfHckhHKTAXtyU=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210113192617-8a28198491f7 h1:2N1NAfBCmG1vIkbdlIOb/YbaYXCW40YOllWqMZDjnHM=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210113192617-8a28198491f7/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210120130732-6167b5e5b2e8 h1:50/m5nP40RaXnXyd0GHHUd+CfkmcYeTNGAY5eXQlBeY=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210120130732-6167b5e5b2e8/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210121103409-4b079ce99178 h1:AVT2ugu3+UzTDEViAxMFbUzzxgUpSVMMpbuaOEd97HY=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210121103409-4b079ce99178/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.7 h1:adELlId/KOGWXmQ79L+NwYSgKES6811RVXiRCj4FE0s=
github.com/hashicorp/packer-plugin-sdk v0.0.7/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.8 h1:/qyCO9YqALnaHSE++y+//tNy68++4SThZctqTwqikrU=
github.com/hashicorp/packer-plugin-sdk v0.0.8/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.9 h1:PWX6g0TeAbev5zhiRR91k3Z0wVCqsivs6xyBTRmPMkQ=
github.com/hashicorp/packer-plugin-sdk v0.0.9/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.6/go.mod h1:Nvh28f+Jmpp2rcaN79bULTouNkGNDRfHckhHKTAXtyU=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210111224258-fd30ebb797f0/go.mod h1:YdWTt5w6cYfaQG7IOi5iorL+3SXnz8hI0gJCi8Db/LI=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210120105339-f6fd68d2570a h1:QbS+UBmK9DZuEDPodi1pCiS66dLYI3rmUX/cowNopsk=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210120105339-f6fd68d2570a/go.mod h1:exN0C+Pe+3zu18l4nxueNjX5cfmslxUX/m/xk4IVmZQ=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210122130548-45a6ca0a9365 h1:u7DeYY9ukhSZpLE11qCFU8pxnO+YM2/85wwVXHJZdRE=
github.com/hashicorp/packer-plugin-sdk v0.0.7-0.20210122130548-45a6ca0a9365/go.mod h1:K7VsU0lfJBDyiUrSNnS/j+zMxSRwwH9WC9QvHv32KsU=
github.com/hashicorp/packer-plugin-sdk v0.0.10-0.20210126105622-8e1648006d93 h1:3wFACjFiBkF5sZrai0zvcWv2fHIgLa4g6ZXxbqngBhs=
github.com/hashicorp/packer-plugin-sdk v0.0.10-0.20210126105622-8e1648006d93/go.mod h1:AtWQLNfpn7cgH2SmZ1PTedwqNOhiPvzcuKfH5sDvIQ0=
github.com/hashicorp/packer-plugin-sdk v0.0.11-0.20210127164048-448d64e93ee6 h1:nz8r5Stxq3kan94Rpy5JpRLAMHLukgrdafQOVFzyawk=
github.com/hashicorp/packer-plugin-sdk v0.0.11-0.20210127164048-448d64e93ee6/go.mod h1:GNb0WNs7zibb8vzUZce1As64z2AW0FEMwhe2J7/NW5I=
github.com/hashicorp/packer-plugin-sdk v0.0.11-0.20210128163027-e8ca18774ef6 h1:cIEMJEQNtAyx1t0aOodcQT7B6qjbQFJ8p6Az1nfK3fw=
github.com/hashicorp/packer-plugin-sdk v0.0.11-0.20210128163027-e8ca18774ef6/go.mod h1:GNb0WNs7zibb8vzUZce1As64z2AW0FEMwhe2J7/NW5I=
github.com/hashicorp/serf v0.8.2 h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/serf v0.9.2 h1:yJoyfZXo4Pk2p/M/viW+YLibBFiIbKoP79gu7kDAFP0=
github.com/hashicorp/serf v0.9.2/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
@ -399,25 +438,38 @@ github.com/joyent/triton-go v0.0.0-20180628001255-830d2b111e62 h1:JHCT6xuyPUrbbg
github.com/joyent/triton-go v0.0.0-20180628001255-830d2b111e62/go.mod h1:U+RSyWxWd04xTqnuOQxnai7XGS2PrPY2cfGoDKtMHjA=
github.com/json-iterator/go v1.1.6 h1:MrUvLMLTMxbqFJ9kzlvat/rYZqZnW3u4wkLzWTaFwKs=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.2.1+incompatible h1:fSuqC+Gmlu6l/ZYAoZzx2pyucC8Xza35fpRVWLVmUEE=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v0.0.0-20160131094358-f86d2e6d8a77 h1:rJnR80lkojFgjdg/oQPhbZoY8t8uM51XMz8DrJrjabk=
github.com/klauspost/compress v0.0.0-20160131094358-f86d2e6d8a77/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.11.6 h1:EgWPCW6O3n1D5n99Zq3xXBt9uCwRGvpwGOusOLNBRSQ=
github.com/klauspost/compress v1.11.6/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/compress v1.11.7 h1:0hzRabrMN4tSTvMfnL3SCv1ZGeAP23ynzodBgaHeMeg=
github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/cpuid v0.0.0-20160106104451-349c67577817 h1:/7pPahIC+GoCm/euDCi2Pm29bAj9tc6TcK4Zcc8D3WI=
github.com/klauspost/cpuid v0.0.0-20160106104451-349c67577817/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/crc32 v0.0.0-20160114101742-999f3125931f h1:UD9YLTi2aBhdOOThzatodQ/pGd9nd5255swS+UzHZj4=
github.com/klauspost/crc32 v0.0.0-20160114101742-999f3125931f/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
github.com/klauspost/crc32 v1.2.0 h1:0VuyqOCruD33/lJ/ojXNvzVyl8Zr5zdTmj9l9qLZ86I=
github.com/klauspost/crc32 v1.2.0/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
github.com/klauspost/pgzip v0.0.0-20151221113845-47f36e165cec h1:PYqF3Tiz2W2Ag0ezyDhAobWDWlrFv7U+qct4spLeDBM=
github.com/klauspost/pgzip v0.0.0-20151221113845-47f36e165cec/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/fs v0.0.0-20131111012553-2788f0dbd169 h1:YUrU1/jxRqnt0PSrKj1Uj/wEjk/fjnE80QFfi2Zlj7Q=
github.com/kr/fs v0.0.0-20131111012553-2788f0dbd169/go.mod h1:glhvuHOU9Hy7/8PwwdtnarXqLagOX0b/TbZx2zLMqEg=
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
@ -428,16 +480,20 @@ github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3v
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/linode/linodego v0.14.0 h1:0APKMjiVGyry2TTUVDiok72H6cWpFNMMrFWBFn14aFU=
github.com/linode/linodego v0.14.0/go.mod h1:2ce3S00NrDqJfp4i55ZuSlT0U3cKNELNYACWBPI8Tnw=
github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9 h1:SmVbOZFWAlyQshuMfOkiAx1f5oUTsOGG5IXplAEYeeM=
github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9/go.mod h1:kCEbxUJlNDEBNbdQMkPSp6yaKcRXVI6f4ddk8Riv4bc=
github.com/masterzen/simplexml v0.0.0-20190410153822-31eea3082786 h1:2ZKn+w/BJeL43sCxI2jhPLRv73oVVOjEKZjKkflyqxg=
github.com/masterzen/simplexml v0.0.0-20190410153822-31eea3082786/go.mod h1:kCEbxUJlNDEBNbdQMkPSp6yaKcRXVI6f4ddk8Riv4bc=
github.com/masterzen/winrm v0.0.0-20200615185753-c42b5136ff88/go.mod h1:a2HXwefeat3evJHxFXSayvRHpYEPJYtErl4uIzfaUqY=
github.com/masterzen/winrm v0.0.0-20201030141608-56ca5c5f2380 h1:uKhPH5dYpx3Z8ZAnaTGfGZUiHOWa5p5mdG8wZlh+tLo=
github.com/masterzen/winrm v0.0.0-20201030141608-56ca5c5f2380/go.mod h1:a2HXwefeat3evJHxFXSayvRHpYEPJYtErl4uIzfaUqY=
github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6 h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.3 h1:ns/ykhmWi7G9O+8a448SecJU3nSMBXJfqQkl0upE1jI=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
@ -447,9 +503,12 @@ github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHX
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.10 h1:CoZ3S2P7pvtP45xOtBw+/mDL2z0RKI576gSkzRRpdGg=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-tty v0.0.0-20191112051231-74040eebce08 h1:8YAWbq7rJqfbc6IaAvA2eCQuOQvf6Bs4vHKcOyWw//E=
github.com/mattn/go-tty v0.0.0-20191112051231-74040eebce08/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/miekg/dns v1.0.14 h1:9jZdLNd/P4+SfEJ0TNyxYpsK8N4GtfylBLqtbYN1sbA=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26 h1:gPxPSwALAeHJSjarOs00QjVdV9QoBvc1D2ujQUr5BzU=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
@ -463,6 +522,7 @@ github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-testing-interface v1.0.3 h1:gqwbsGvc0jbhAPW/26WfEoSiPANAVlR49AAVdvaTjI4=
github.com/mitchellh/go-testing-interface v1.0.3/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
@ -486,8 +546,12 @@ github.com/mitchellh/prefixedio v0.0.0-20151214002211-6e6954073784 h1:+DAetXqxv/
github.com/mitchellh/prefixedio v0.0.0-20151214002211-6e6954073784/go.mod h1:kB1naBgV9ORnkiTVeyJOI1DavaJkG4oNIq0Af6ZVKUo=
github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ=
@ -525,6 +589,8 @@ github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/rivo/uniseg v0.1.0 h1:+2KBaVoUmb9XzDsrx/Ct0W/EYOSFf/nWTauy++DprtY=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
@ -549,9 +615,12 @@ github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykE
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c h1:Ho+uVpkel/udgjbwB5Lktg9BtvJSh2DT0Hi6LPSyI2w=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
@ -728,6 +797,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -777,6 +847,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@ -812,6 +883,7 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200711155855-7342f9734a7d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
@ -885,6 +957,7 @@ google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200831141814-d751682dd103/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200914193844-75d14daec038 h1:SnvTpXhVDJGFxzZiHbMUZTh3VjU2Vx2feJ7Zfl5+OIY=
google.golang.org/genproto v0.0.0-20200914193844-75d14daec038/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200918140846-d0d605568037 h1:ujwz1DPMeHwCvo36rK5shXhAzc4GMRecrqQFaMZJBKQ=
google.golang.org/genproto v0.0.0-20200918140846-d0d605568037/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
@ -926,6 +999,8 @@ gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qS
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/ini.v1 v1.42.0 h1:7N3gPTt50s8GuLortA00n8AqRTk75qOP98+mTPpgzRk=
gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU=
gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/jarcoal/httpmock.v1 v1.0.0-20181117152235-275e9df93516 h1:H6trpavCIuipdInWrab8l34Mf+GGVfphniHostMdMaQ=
gopkg.in/jarcoal/httpmock.v1 v1.0.0-20181117152235-275e9df93516/go.mod h1:d3R+NllX3X5e0zlG1Rful3uLvsGC/Q3OHut5464DEQw=
gopkg.in/square/go-jose.v2 v2.3.1 h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=
@ -934,6 +1009,7 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -0,0 +1,238 @@
package addrs
import (
"fmt"
"strings"
"github.com/hashicorp/hcl/v2"
"golang.org/x/net/idna"
)
// Plugin encapsulates a single plugin type.
type Plugin struct {
	// Type is the plugin's short name, e.g. the "aws" in "packer-plugin-aws".
	Type string
	// Namespace is the registry namespace (e.g. a GitHub organization).
	// When empty, DefaultPluginNamespace is substituted by RealRelativePath.
	Namespace string
	// Hostname is the registry host. ForDisplay omits it when it equals
	// DefaultPluginHost.
	Hostname string
}
// RealRelativePath returns the plugin's relative installation path in the
// form "<namespace>/packer-plugin-<type>". An empty Namespace falls back to
// DefaultPluginNamespace.
func (p Plugin) RealRelativePath() string {
	namespace := p.Namespace
	if namespace == "" {
		namespace = DefaultPluginNamespace
	}
	return namespace + "/packer-plugin-" + p.Type
}
// Parts returns the hostname, namespace, and type, in the order they appear
// in a fully-qualified plugin address.
func (p Plugin) Parts() []string {
	parts := make([]string, 0, 3)
	parts = append(parts, p.Hostname, p.Namespace, p.Type)
	return parts
}
// String returns the fully-qualified plugin address,
// "<hostname>/<namespace>/<type>".
func (p Plugin) String() string {
	fqn := strings.Join(p.Parts(), "/")
	return fqn
}
// ForDisplay returns a user-friendly FQN string, simplified for readability.
// The hostname is omitted when it equals DefaultPluginHost, and the namespace
// is omitted when it equals DefaultPluginNamespace.
//
// A value receiver is used for consistency with the other Plugin methods, so
// ForDisplay is in the method set of both Plugin and *Plugin.
func (p Plugin) ForDisplay() string {
	// At most three segments: hostname, namespace, type.
	parts := make([]string, 0, 3)
	if p.Hostname != DefaultPluginHost {
		parts = append(parts, p.Hostname)
	}
	if p.Namespace != DefaultPluginNamespace {
		parts = append(parts, p.Namespace)
	}
	parts = append(parts, p.Type)
	return strings.Join(parts, "/")
}
// DefaultPluginHost is the registry hostname assumed when a plugin address
// omits one; ForDisplay drops this hostname from the displayed address.
const DefaultPluginHost = "github.com"

// DefaultPluginNamespace is the namespace assumed when a plugin address
// omits one; RealRelativePath substitutes it for an empty Namespace.
const DefaultPluginNamespace = "hashicorp"
// ParsePluginPart processes an addrs.Plugin namespace or type string
// provided by an end-user, producing a normalized version if possible or
// an error if the string contains invalid characters.
//
// A plugin part is processed in the same way as an individual label in a DNS
// domain name: it is transformed to lowercase per the usual DNS case mapping
// and normalization rules and may contain only letters, digits, and dashes.
// Additionally, dashes may not appear at the start or end of the string.
//
// These restrictions are intended to allow these names to appear in fussy
// contexts such as directory/file names on case-insensitive filesystems,
// repository names on GitHub, etc. We're using the DNS rules in particular,
// rather than some similar rules defined locally, because the hostname part
// of an addrs.Plugin is already a hostname and it's ideal to use exactly
// the same case folding and normalization rules for all of the parts.
//
// It's valid to pass the result of this function as the argument to a
// subsequent call, in which case the result will be identical.
func ParsePluginPart(given string) (string, error) {
	// Reject the obviously-invalid shapes before handing the string to the
	// IDNA library.
	switch {
	case len(given) == 0:
		return "", fmt.Errorf("must have at least one character")

	// The idna library doesn't expose individual label parsing directly,
	// but once we've verified it doesn't contain any dots we can just
	// treat it like a top-level domain for this library's purposes.
	case strings.ContainsRune(given, '.'):
		return "", fmt.Errorf("dots are not allowed")

	// We don't allow names containing multiple consecutive dashes, just as
	// a matter of preference: they look confusing, or incorrect.
	// This also, as a side-effect, prevents the use of the "punycode"
	// indicator prefix "xn--" that would cause the IDNA library to
	// interpret the given name as punycode, because that would be weird
	// and unexpected.
	case strings.Contains(given, "--"):
		return "", fmt.Errorf("cannot use multiple consecutive dashes")
	}

	// Use the same "IDNA" library we use for the hostname portion, since it
	// already implements the case folding rules we want.
	result, err := idna.Lookup.ToUnicode(given)
	if err != nil {
		return "", fmt.Errorf("must contain only letters, digits, and dashes, and may not use leading or trailing dashes: %w", err)
	}
	return result, nil
}
// IsPluginPartNormalized reports whether str is already in the normalized
// form produced by ParsePluginPart(str). It returns a non-nil error when
// str is not a valid plugin part at all.
func IsPluginPartNormalized(str string) (bool, error) {
	normalized, err := ParsePluginPart(str)
	if err != nil {
		return false, err
	}
	return str == normalized, nil
}
// ParsePluginSourceString parses the source attribute and returns a plugin.
// This is intended primarily to parse the FQN-like strings
//
// The following are valid source string formats:
//
//	name
//	namespace/name
//	hostname/namespace/name
//
// Omitted parts default to DefaultPluginHost and DefaultPluginNamespace.
func ParsePluginSourceString(str string) (*Plugin, hcl.Diagnostics) {
	ret := &Plugin{
		Hostname:  DefaultPluginHost,
		Namespace: DefaultPluginNamespace,
	}
	var diags hcl.Diagnostics

	// split the source string into individual components
	parts := strings.Split(str, "/")
	if len(parts) == 0 || len(parts) > 3 {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid plugin source string",
			Detail:   `The "source" attribute must be in the format "[hostname/][namespace/]name"`,
		})
		return nil, diags
	}

	// check for an invalid empty string in any part
	for i := range parts {
		if parts[i] == "" {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid plugin source string",
				Detail:   `The "source" attribute must be in the format "[hostname/][namespace/]name"`,
			})
			return nil, diags
		}
	}

	// check the 'name' portion, which is always the last part
	givenName := parts[len(parts)-1]
	name, err := ParsePluginPart(givenName)
	if err != nil {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid plugin type",
			// Fixed: the format string previously ended with a stray `"`.
			Detail: fmt.Sprintf(`Invalid plugin type %q in source %q: %s`, givenName, str, err),
		})
		return nil, diags
	}
	ret.Type = name

	if len(parts) == 1 {
		return ret, diags
	}

	if len(parts) >= 2 {
		// the namespace is always the second-to-last part
		givenNamespace := parts[len(parts)-2]
		namespace, err := ParsePluginPart(givenNamespace)
		if err != nil {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid plugin namespace",
				// Fixed: report the user-provided givenNamespace. The parsed
				// `namespace` is the empty string when err != nil, so using
				// it here produced a useless message. The stray trailing `"`
				// in the format string is also removed.
				Detail: fmt.Sprintf(`Invalid plugin namespace %q in source %q: %s`, givenNamespace, str, err),
			})
			return nil, diags
		}
		ret.Namespace = namespace
	}

	// Final Case: 3 parts
	if len(parts) == 3 {
		// the hostname is always the first part in a three-part source string
		hostname := parts[0]
		// TODO(azr): validate host ? Can this be something else than a
		// github.com host for now?
		ret.Hostname = hostname
	}

	// Due to how plugin executables are named and plugin git repositories
	// are conventionally named, it's a reasonable and
	// apparently-somewhat-common user error to incorrectly use the
	// "packer-plugin-" prefix in a plugin source address. There is
	// no good reason for a plugin to have the prefix "packer-" anyway,
	// so we've made that invalid from the start both so we can give feedback
	// to plugin developers about the packer- prefix being redundant
	// and give specialized feedback to folks who incorrectly use the full
	// packer-plugin- prefix to help them self-correct.
	const redundantPrefix = "packer-"
	const userErrorPrefix = "packer-plugin-"
	if strings.HasPrefix(ret.Type, redundantPrefix) {
		if strings.HasPrefix(ret.Type, userErrorPrefix) {
			// Likely user error. We only return this specialized error if
			// whatever is after the prefix would otherwise be a
			// syntactically-valid plugin type, so we don't end up advising
			// the user to try something that would be invalid for another
			// reason anyway.
			// (This is mainly just for robustness, because the validation
			// we already did above should've rejected most/all ways for
			// the suggestedType to end up invalid here.)
			suggestedType := ret.Type[len(userErrorPrefix):]
			if _, err := ParsePluginPart(suggestedType); err == nil {
				// Fixed: copy the Plugin by value. `suggestedAddr := ret`
				// copied the *pointer*, so setting suggestedAddr.Type also
				// overwrote ret.Type and the diagnostic below displayed the
				// already-stripped type instead of what the user wrote.
				suggestedAddr := *ret
				suggestedAddr.Type = suggestedType
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid plugin type",
					Detail:   fmt.Sprintf("Plugin source %q has a type with the prefix %q, which isn't valid. Although that prefix is often used in the names of version control repositories for Packer plugins, plugin source strings should not include it.\n\nDid you mean %q?", ret.ForDisplay(), userErrorPrefix, suggestedAddr.ForDisplay()),
				})
				return nil, diags
			}
		}
		// Otherwise, probably instead an incorrectly-named plugin, perhaps
		// arising from a similar instinct to what causes there to be
		// thousands of Python packages on PyPI with "python-"-prefixed
		// names.
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid plugin type",
			Detail:   fmt.Sprintf("Plugin source %q has a type with the prefix %q, which isn't allowed because it would be redundant to name a Packer plugin with that prefix. If you are the author of this plugin, rename it to not include the prefix.", ret, redundantPrefix),
		})
		return nil, diags
	}

	return ret, diags
}

View File

@ -7,6 +7,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclparse"
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
@ -17,24 +18,30 @@ import (
"github.com/zclconf/go-cty/cty"
)
const lockedVersion = "v1.5.0"
func getBasicParser() *Parser {
return &Parser{
Parser: hclparse.NewParser(),
BuilderSchemas: packer.MapOfBuilder{
"amazon-ebs": func() (packersdk.Builder, error) { return &MockBuilder{}, nil },
"virtualbox-iso": func() (packersdk.Builder, error) { return &MockBuilder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
},
ProvisionersSchemas: packer.MapOfProvisioner{
"shell": func() (packersdk.Provisioner, error) { return &MockProvisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &MockProvisioner{}, nil },
},
PostProcessorsSchemas: packer.MapOfPostProcessor{
"amazon-import": func() (packersdk.PostProcessor, error) { return &MockPostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &MockPostProcessor{}, nil },
},
DatasourceSchemas: packersdk.MapOfDatasource{
"amazon-ami": func() (packersdk.Datasource, error) { return &MockDatasource{}, nil },
CorePackerVersion: version.Must(version.NewSemver(lockedVersion)),
CorePackerVersionString: lockedVersion,
Parser: hclparse.NewParser(),
PluginConfig: &packer.PluginConfig{
Builders: packer.MapOfBuilder{
"amazon-ebs": func() (packersdk.Builder, error) { return &MockBuilder{}, nil },
"virtualbox-iso": func() (packersdk.Builder, error) { return &MockBuilder{}, nil },
"null": func() (packersdk.Builder, error) { return &null.Builder{}, nil },
},
Provisioners: packer.MapOfProvisioner{
"shell": func() (packersdk.Provisioner, error) { return &MockProvisioner{}, nil },
"file": func() (packersdk.Provisioner, error) { return &MockProvisioner{}, nil },
},
PostProcessors: packer.MapOfPostProcessor{
"amazon-import": func() (packersdk.PostProcessor, error) { return &MockPostProcessor{}, nil },
"manifest": func() (packersdk.PostProcessor, error) { return &MockPostProcessor{}, nil },
},
DataSources: packer.MapOfDatasource{
"amazon-ami": func() (packersdk.Datasource, error) { return &MockDatasource{}, nil },
},
},
}
}
@ -102,6 +109,39 @@ func testParse(t *testing.T, tests []parseTest) {
}
}
func testParse_only_Parse(t *testing.T, tests []parseTest) {
t.Helper()
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotCfg, gotDiags := tt.parser.Parse(tt.args.filename, tt.args.varFiles, tt.args.vars)
if tt.parseWantDiags == (gotDiags == nil) {
t.Fatalf("Parser.parse() unexpected %q diagnostics.", gotDiags)
}
if tt.parseWantDiagHasErrors != gotDiags.HasErrors() {
t.Fatalf("Parser.parse() unexpected diagnostics HasErrors. %s", gotDiags)
}
if diff := cmp.Diff(tt.parseWantCfg, gotCfg, cmpOpts...); diff != "" {
t.Fatalf("Parser.parse() wrong packer config. %s", diff)
}
if gotCfg != nil && !tt.parseWantDiagHasErrors {
if diff := cmp.Diff(tt.parseWantCfg.InputVariables, gotCfg.InputVariables, cmpOpts...); diff != "" {
t.Fatalf("Parser.parse() unexpected input vars. %s", diff)
}
if diff := cmp.Diff(tt.parseWantCfg.LocalVariables, gotCfg.LocalVariables, cmpOpts...); diff != "" {
t.Fatalf("Parser.parse() unexpected local vars. %s", diff)
}
}
if gotDiags.HasErrors() {
return
}
})
}
}
var (
// everything in the tests is a basicNestedMockConfig this allow to test
// each known type to packer ( and embedding ) in one go.
@ -189,6 +229,39 @@ var (
},
},
}
basicMockPostProcessorDynamicTags = &MockPostProcessor{
Config: MockConfig{
NotSquashed: "value <UNKNOWN>",
NestedMockConfig: NestedMockConfig{
String: "string",
Int: 42,
Int64: 43,
Bool: true,
Trilean: config.TriTrue,
Duration: 10 * time.Second,
MapStringString: map[string]string{
"a": "b",
"c": "d",
},
SliceString: []string{
"a",
"b",
"c",
},
SliceSliceString: [][]string{
{"a", "b"},
{"c", "d"},
},
Tags: []MockTag{
{Key: "first_tag_key", Value: "first_tag_value"},
{Key: "Component", Value: "user-service"},
{Key: "Environment", Value: "production"},
},
},
Nested: basicNestedMockConfig,
NestedSlice: []NestedMockConfig{},
},
}
basicMockCommunicator = &MockCommunicator{
Config: MockConfig{
NestedMockConfig: basicNestedMockConfig,
@ -248,9 +321,19 @@ var ctyTypeComparer = cmp.Comparer(func(x, y cty.Type) bool {
return x.Equals(y)
})
var versionComparer = cmp.Comparer(func(x, y *version.Version) bool {
return x.Equal(y)
})
var versionConstraintComparer = cmp.Comparer(func(x, y *version.Constraint) bool {
return x.String() == y.String()
})
var cmpOpts = []cmp.Option{
ctyValueComparer,
ctyTypeComparer,
versionComparer,
versionConstraintComparer,
cmpopts.IgnoreUnexported(
PackerConfig{},
Variable{},

View File

@ -46,6 +46,9 @@ var packerBlockSchema = &hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{Name: "required_version"},
},
Blocks: []hcl.BlockHeaderSchema{
{Type: "required_plugins"},
},
}
// Parser helps you parse HCL folders. It will parse an hcl file or directory
@ -59,13 +62,7 @@ type Parser struct {
*hclparse.Parser
BuilderSchemas packer.BuilderStore
ProvisionersSchemas packer.ProvisionerStore
PostProcessorsSchemas packer.PostProcessorStore
DatasourceSchemas packer.DatasourceStore
PluginConfig *packer.PluginConfig
}
const (
@ -132,10 +129,6 @@ func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]st
Basedir: basedir,
Cwd: wd,
CorePackerVersionString: p.CorePackerVersionString,
builderSchemas: p.BuilderSchemas,
provisionersSchemas: p.ProvisionersSchemas,
postProcessorsSchemas: p.PostProcessorsSchemas,
datasourceSchemas: p.DatasourceSchemas,
parser: p,
files: files,
}
@ -147,7 +140,7 @@ func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]st
}
// Before we go further, we'll check to make sure this version can read
// that file, so we can produce a version-related error message rather than
// all files, so we can produce a version-related error message rather than
// potentially-confusing downstream errors.
versionDiags := cfg.CheckCoreVersionRequirements(p.CorePackerVersion)
diags = append(diags, versionDiags...)
@ -155,8 +148,30 @@ func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]st
return cfg, diags
}
// Decode required_plugins blocks and create implicit required_plugins
// blocks. Implicit required_plugins blocks happen when a builder or another
// plugin cannot be found, for example if one uses :
// source "amazon-ebs" "example" { ... }
// And no `amazon-ebs` builder can be found. This will then be the
// equivalent of having :
// packer {
// required_plugins {
// amazon = "latest"
// }
// }
// Note: using `latest` ( or actually an empty string ) in a config file
// does not work and packer will ask you to pick a version
{
for _, file := range files {
diags = append(diags, cfg.decodeRequiredPluginsBlock(file)...)
}
for _, file := range files {
diags = append(diags, cfg.decodeImplicitRequiredPluginsBlocks(file)...)
}
}
// Decode variable blocks so that they are available later on. Here locals
// can use input variables so we decode them firsthand.
// can use input variables so we decode input variables first.
{
for _, file := range files {
diags = append(diags, cfg.decodeInputVariables(file)...)
@ -212,6 +227,11 @@ func (p *Parser) Parse(filename string, varFiles []string, argVars map[string]st
diags = append(diags, cfg.collectInputVariableValues(os.Environ(), varFiles, argVars)...)
}
// parse the actual content // rest
for _, file := range cfg.files {
diags = append(diags, cfg.parser.parseConfig(file, cfg)...)
}
return cfg, diags
}
@ -277,7 +297,14 @@ func filterVarsFromLogs(inputOrLocal Variables) {
func (cfg *PackerConfig) Initialize(opts packer.InitializeOptions) hcl.Diagnostics {
var diags hcl.Diagnostics
_, moreDiags := cfg.InputVariables.Values()
// enable packer to start plugins requested in required_plugins.
moreDiags := cfg.detectPluginBinaries()
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
return diags
}
_, moreDiags = cfg.InputVariables.Values()
diags = append(diags, moreDiags...)
_, moreDiags = cfg.LocalVariables.Values()
diags = append(diags, moreDiags...)
@ -287,21 +314,17 @@ func (cfg *PackerConfig) Initialize(opts packer.InitializeOptions) hcl.Diagnosti
filterVarsFromLogs(cfg.InputVariables)
filterVarsFromLogs(cfg.LocalVariables)
// decode the actual content
for _, file := range cfg.files {
diags = append(diags, cfg.parser.decodeConfig(file, cfg)...)
}
diags = append(diags, cfg.initializeBlocks()...)
return diags
}
// decodeConfig looks in the found blocks for everything that is not a variable
// block. It should be called after parsing input variables and locals so that
// they can be referenced.
func (p *Parser) decodeConfig(f *hcl.File, cfg *PackerConfig) hcl.Diagnostics {
// parseConfig looks in the found blocks for everything that is not a variable
// block.
func (p *Parser) parseConfig(f *hcl.File, cfg *PackerConfig) hcl.Diagnostics {
var diags hcl.Diagnostics
body := dynblock.Expand(f.Body, cfg.EvalContext(nil))
body := f.Body
content, moreDiags := body.Content(configSchema)
diags = append(diags, moreDiags...)

182
hcl2template/plugin.go Normal file
View File

@ -0,0 +1,182 @@
package hcl2template
import (
	"crypto/sha256"
	"fmt"
	"log"
	"runtime"
	"sort"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/ext/dynblock"
	pluginsdk "github.com/hashicorp/packer-plugin-sdk/plugin"
	plugingetter "github.com/hashicorp/packer/packer/plugin-getter"
)
// PluginRequirements returns the plugin requirements declared in all of the
// config's required_plugins blocks, sorted by accessor name so the result is
// deterministic. Two blocks declaring the same accessor name is reported as
// an error, since accessors must be unique to be referenced from HCL2.
func (cfg *PackerConfig) PluginRequirements() (plugingetter.Requirements, hcl.Diagnostics) {

	var diags hcl.Diagnostics
	var reqs plugingetter.Requirements
	reqPluginsBlocks := cfg.Packer.RequiredPlugins

	// Take all required plugins, make sure there are no conflicting blocks
	// and append them to the list.
	uniq := map[string]*RequiredPlugin{}
	for _, requiredPluginsBlock := range reqPluginsBlocks {
		for name, block := range requiredPluginsBlock.RequiredPlugins {
			if previouslySeenBlock, found := uniq[name]; found {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Duplicate required_plugin.%q block", name),
					Detail: fmt.Sprintf("Block previously seen at %s is already named %q.\n", previouslySeenBlock.DeclRange, name) +
						"Names at the left hand side of required_plugins are made available to use in your HCL2 configurations.\n" +
						"To correctly call their features, two plugins have to have different accessors.",
					Context: &block.DeclRange,
				})
				continue
			}

			reqs = append(reqs, &plugingetter.Requirement{
				Accessor:           name,
				Identifier:         block.Type,
				VersionConstraints: block.Requirement.Required,
			})
			uniq[name] = block
		}
	}

	// Map iteration order is randomized; sort by accessor so repeated runs
	// list/install plugins in a stable order, as the doc comment promises.
	sort.Slice(reqs, func(i, j int) bool {
		return reqs[i].Accessor < reqs[j].Accessor
	})

	return reqs, diags
}
// detectPluginBinaries looks in the known plugin folders for binaries
// matching every declared plugin requirement, and registers the newest
// matching installation of each one with the plugin config so Packer can
// start it later. A requirement with no installed binary is an error,
// pointing the user at `packer init`.
func (cfg *PackerConfig) detectPluginBinaries() hcl.Diagnostics {
	opts := plugingetter.ListInstallationsOptions{
		FromFolders: cfg.parser.PluginConfig.KnownPluginFolders,
		BinaryInstallationOptions: plugingetter.BinaryInstallationOptions{
			OS:              runtime.GOOS,
			ARCH:            runtime.GOARCH,
			APIVersionMajor: pluginsdk.APIVersionMajor,
			APIVersionMinor: pluginsdk.APIVersionMinor,
			Checksummers: []plugingetter.Checksummer{
				{Type: "sha256", Hash: sha256.New()},
			},
		},
	}

	// Plugin binaries carry a .exe extension on Windows.
	if runtime.GOOS == "windows" && opts.Ext == "" {
		opts.BinaryInstallationOptions.Ext = ".exe"
	}

	pluginReqs, diags := cfg.PluginRequirements()
	if diags.HasErrors() {
		return diags
	}

	for _, pluginRequirement := range pluginReqs {
		sortedInstalls, err := pluginRequirement.ListInstallations(opts)
		if err != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Failed to list installation for %s", pluginRequirement.Identifier.ForDisplay()),
				Detail:   err.Error(),
			})
			continue
		}
		if len(sortedInstalls) == 0 {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("no plugin installed for %s %v", pluginRequirement.Identifier.ForDisplay(), pluginRequirement.VersionConstraints.String()),
				Detail:   "Did you run packer init for this project ?",
			})
			continue
		}
		log.Printf("[TRACE] Found the following %q installations: %v", pluginRequirement.Identifier.ForDisplay(), sortedInstalls)

		// Installations are sorted; pick the last (most recent) one.
		install := sortedInstalls[len(sortedInstalls)-1]
		err = cfg.parser.PluginConfig.DiscoverMultiPlugin(pluginRequirement.Accessor, install.BinaryPath)
		if err != nil {
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Failed to discover plugin %s", pluginRequirement.Identifier.ForDisplay()),
				// Fixed: surface the underlying error — the diagnostic
				// previously had no Detail, leaving users with no clue why
				// discovery failed.
				Detail: err.Error(),
			})
			continue
		}
	}

	return diags
}
// initializeBlocks verifies that every builder, provisioner and
// post-processor used by the config's build blocks is known to the plugin
// config, resolves each source usage against its source definition, and
// expands any `dynamic` blocks in the resulting bodies.
func (cfg *PackerConfig) initializeBlocks() hcl.Diagnostics {
	// verify that all used plugins do exist and expand dynamic bodies
	var diags hcl.Diagnostics

	for _, build := range cfg.Builds {
		for i := range build.Sources {
			// here we grab a pointer to the source usage because we will set
			// its body.
			srcUsage := &(build.Sources[i])
			if !cfg.parser.PluginConfig.Builders.Has(srcUsage.Type) {
				diags = append(diags, &hcl.Diagnostic{
					Summary:  "Unknown " + buildSourceLabel + " type " + srcUsage.Type,
					Subject:  &build.HCL2Ref.DefRange,
					Detail:   fmt.Sprintf("known builders: %v", cfg.parser.PluginConfig.Builders.List()),
					Severity: hcl.DiagError,
				})
				continue
			}

			sourceDefinition, found := cfg.Sources[srcUsage.SourceRef]
			if !found {
				diags = append(diags, &hcl.Diagnostic{
					Summary:  "Unknown " + sourceLabel + " " + srcUsage.String(),
					Subject:  build.HCL2Ref.DefRange.Ptr(),
					Severity: hcl.DiagError,
					Detail:   fmt.Sprintf("Known: %v", cfg.Sources),
					// TODO: show known sources as a string slice here ^.
				})
				continue
			}

			body := sourceDefinition.block.Body
			if srcUsage.Body != nil {
				// merge additions into source definition to get a new body.
				body = hcl.MergeBodies([]hcl.Body{body, srcUsage.Body})
			}

			// expand any dynamic block.
			body = dynblock.Expand(body, cfg.EvalContext(nil))

			srcUsage.Body = body
		}

		for _, provBlock := range build.ProvisionerBlocks {
			if !cfg.parser.PluginConfig.Provisioners.Has(provBlock.PType) {
				diags = append(diags, &hcl.Diagnostic{
					Summary:  fmt.Sprintf("Unknown "+buildProvisionerLabel+" type %q", provBlock.PType),
					Subject:  provBlock.HCL2Ref.TypeRange.Ptr(),
					Detail:   fmt.Sprintf("known "+buildProvisionerLabel+"s: %v", cfg.parser.PluginConfig.Provisioners.List()),
					Severity: hcl.DiagError,
				})
			}
			// Allow rest of the body to have dynamic blocks
			provBlock.HCL2Ref.Rest = dynblock.Expand(provBlock.HCL2Ref.Rest, cfg.EvalContext(nil))
		}

		for _, ppList := range build.PostProcessorsLists {
			for _, ppBlock := range ppList {
				if !cfg.parser.PluginConfig.PostProcessors.Has(ppBlock.PType) {
					diags = append(diags, &hcl.Diagnostic{
						Summary: fmt.Sprintf("Unknown "+buildPostProcessorLabel+" type %q", ppBlock.PType),
						Subject: ppBlock.HCL2Ref.TypeRange.Ptr(),
						// Fixed copy-paste bug: list the known
						// post-processors, not the known provisioners.
						Detail:   fmt.Sprintf("known "+buildPostProcessorLabel+"s: %v", cfg.parser.PluginConfig.PostProcessors.List()),
						Severity: hcl.DiagError,
					})
				}
				// Allow the rest of the body to have dynamic blocks
				ppBlock.HCL2Ref.Rest = dynblock.Expand(ppBlock.HCL2Ref.Rest, cfg.EvalContext(nil))
			}
		}
	}

	return diags
}

View File

@ -1,4 +1,8 @@
packer {
required_version = ">= v1"
}
// starts resources to provision them.
build {
sources = [
@ -7,6 +11,15 @@ build {
source "source.amazon-ebs.ubuntu-1604" {
string = "setting from build section"
nested {
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}
}
provisioner "shell" {
@ -162,7 +175,16 @@ build {
]
}
nested_slice {
tag {
key = "first_tag_key"
value = "first_tag_value"
}
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}

View File

@ -1,9 +1,17 @@
source "amazon-ebs" "ubuntu-1604" {
int = 42
nested_slice {
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}
}
source "virtualbox-iso" "ubuntu-1204" {
string = "string"
int = 42

View File

@ -0,0 +1,7 @@
packer {
required_plugins {
amazon = ">= v0"
amazon = ">= v4"
}
}

View File

@ -0,0 +1,465 @@
packer {
required_version = ">= v1"
required_plugins {
amazon = ">= v0"
amazon-v1 = {
source = "amazon"
version = ">= v1"
}
amazon-v2 = {
source = "amazon"
version = ">= v2"
}
amazon-v3 = {
source = "hashicorp/amazon"
version = ">= v3"
}
amazon-v3-azr = {
source = "azr/amazon"
version = ">= v3"
}
amazon-v4 = {
source = "github.com/hashicorp/amazon"
version = ">= v4"
}
}
}
// starts resources to provision them.
build {
sources = [
"source.virtualbox-iso.ubuntu-1204",
]
source "source.amazon-v3-ebs.my-image" {
string = "setting from build section"
}
provisioner "shell" {
name = "provisioner that does something"
not_squashed = "${var.foo} ${upper(build.ID)}"
string = "string"
int = "${41 + 1}"
int64 = "${42 + 1}"
bool = "true"
trilean = true
duration = "${9 + 1}s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [for s in var.availability_zone_names : lower(s)]
slice_slice_string = [
["a","b"],
["c","d"]
]
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [for s in var.availability_zone_names : lower(s)]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
tag {
key = "first_tag_key"
value = "first_tag_value"
}
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}
}
provisioner "file" {
not_squashed = "${var.foo} ${upper(build.ID)}"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = local.abc_map[*].id
slice_slice_string = [
["a","b"],
["c","d"]
]
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
tag {
key = "first_tag_key"
value = "first_tag_value"
}
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}
}
post-processor "amazon-import" {
name = "something"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
keep_input_artifact = true
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
tag {
key = "first_tag_key"
value = "first_tag_value"
}
dynamic "tag" {
for_each = local.standard_tags
content {
key = tag.key
value = tag.value
}
}
}
}
post-processor "amazon-import" {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
post-processors {
post-processor "amazon-import" {
name = "first-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
post-processor "amazon-import" {
name = "second-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
}
post-processors {
post-processor "amazon-import" {
name = "third-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
post-processor "amazon-import" {
name = "fourth-nested-post-processor"
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
not_squashed = "${var.foo} ${upper(build.ID)}"
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
}
}
}
}

View File

@ -0,0 +1,94 @@
source "amazon-v3-ebs" "my-image" {
int = 42
}
source "virtualbox-iso" "ubuntu-1204" {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
nested {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
nested_slice {
string = "string"
int = 42
int64 = 43
bool = true
trilean = true
duration = "10s"
map_string_string = {
a = "b"
c = "d"
}
slice_string = [
"a",
"b",
"c",
]
slice_slice_string = [
["a","b"],
["c","d"]
]
}
}

View File

@ -0,0 +1,39 @@
variables {
foo = "value"
// my_secret = "foo"
// image_name = "foo-image-{{user `my_secret`}}"
}
variable "image_id" {
type = string
default = "image-id-default"
}
variable "port" {
type = number
default = 42
}
variable "availability_zone_names" {
type = list(string)
default = ["A", "B", "C"]
}
locals {
feefoo = "${var.foo}_${var.image_id}"
}
locals {
standard_tags = {
Component = "user-service"
Environment = "production"
}
abc_map = [
{id = "a"},
{id = "b"},
{id = "c"},
]
}

View File

@ -0,0 +1,8 @@
// a source represents a reusable setting for a system boot/start.
source "inexistant" "ubuntu-1204" {
foo = "bar"
}
build {
sources = ["inexistant.ubuntu-1204"]
}

View File

@ -52,7 +52,7 @@ type BuildBlock struct {
Description string
// Sources is the list of sources that we want to start in this build block.
Sources []SourceRef
Sources []SourceUseBlock
// ProvisionerBlocks references a list of HCL provisioner block that will
// will be ran against the sources.
@ -105,7 +105,8 @@ func (p *Parser) decodeBuildConfig(block *hcl.Block, cfg *PackerConfig) (*BuildB
continue
}
build.Sources = append(build.Sources, ref)
// source with no body
build.Sources = append(build.Sources, SourceUseBlock{SourceRef: ref})
}
content, moreDiags := b.Config.Content(buildSchema)

View File

@ -48,24 +48,14 @@ func (p *Parser) decodePostProcessor(block *hcl.Block) (*PostProcessorBlock, hcl
return nil, diags
}
if !p.PostProcessorsSchemas.Has(postProcessor.PType) {
diags = append(diags, &hcl.Diagnostic{
Summary: fmt.Sprintf("Unknown "+buildPostProcessorLabel+" type %q", postProcessor.PType),
Subject: block.LabelRanges[0].Ptr(),
Detail: fmt.Sprintf("known "+buildPostProcessorLabel+"s: %v", p.PostProcessorsSchemas.List()),
Severity: hcl.DiagError,
})
return nil, diags
}
return postProcessor, diags
}
func (cfg *PackerConfig) startPostProcessor(source SourceBlock, pp *PostProcessorBlock, ectx *hcl.EvalContext) (packersdk.PostProcessor, hcl.Diagnostics) {
func (cfg *PackerConfig) startPostProcessor(source SourceUseBlock, pp *PostProcessorBlock, ectx *hcl.EvalContext) (packersdk.PostProcessor, hcl.Diagnostics) {
// ProvisionerBlock represents a detected but unparsed provisioner
var diags hcl.Diagnostics
postProcessor, err := cfg.postProcessorsSchemas.Start(pp.PType)
postProcessor, err := cfg.parser.PluginConfig.PostProcessors.Start(pp.PType)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Summary: fmt.Sprintf("Failed loading %s", pp.PType),

View File

@ -136,22 +136,13 @@ func (p *Parser) decodeProvisioner(block *hcl.Block, cfg *PackerConfig) (*Provis
provisioner.Timeout = timeout
}
if !p.ProvisionersSchemas.Has(provisioner.PType) {
diags = append(diags, &hcl.Diagnostic{
Summary: fmt.Sprintf("Unknown "+buildProvisionerLabel+" type %q", provisioner.PType),
Subject: block.LabelRanges[0].Ptr(),
Detail: fmt.Sprintf("known "+buildProvisionerLabel+"s: %v", p.ProvisionersSchemas.List()),
Severity: hcl.DiagError,
})
return nil, diags
}
return provisioner, diags
}
func (cfg *PackerConfig) startProvisioner(source SourceBlock, pb *ProvisionerBlock, ectx *hcl.EvalContext) (packersdk.Provisioner, hcl.Diagnostics) {
func (cfg *PackerConfig) startProvisioner(source SourceUseBlock, pb *ProvisionerBlock, ectx *hcl.EvalContext) (packersdk.Provisioner, hcl.Diagnostics) {
var diags hcl.Diagnostics
provisioner, err := cfg.provisionersSchemas.Start(pb.PType)
provisioner, err := cfg.parser.PluginConfig.Provisioners.Start(pb.PType)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Summary: fmt.Sprintf("failed loading %s", pb.PType),

View File

@ -17,15 +17,20 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/basic.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
Sources: []SourceUseBlock{
{
Type: "amazon-ebs",
Name: "ubuntu-1604",
SourceRef: SourceRef{
Type: "amazon-ebs",
Name: "ubuntu-1604",
},
},
{
SourceRef: refVBIsoUbuntu1204,
},
refVBIsoUbuntu1204,
},
ProvisionerBlocks: []*ProvisionerBlock{
{
@ -45,7 +50,7 @@ func TestParse_build(t *testing.T) {
},
},
},
false, false,
true, true,
[]packersdk.Build{},
true,
},
@ -53,8 +58,9 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/provisioner_untyped.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
},
true, true,
nil,
@ -64,19 +70,31 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/provisioner_inexistent.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: Builds{
&BuildBlock{
ProvisionerBlocks: []*ProvisionerBlock{
{
PType: "inexistant",
},
},
},
},
},
true, true,
[]packersdk.Build{&packer.CoreBuild{}},
[]packersdk.Build{&packer.CoreBuild{
Provisioners: []packer.CoreBuildProvisioner{},
}},
false,
},
{"untyped post-processor",
defaultParser,
parseTestArgs{"testdata/build/post-processor_untyped.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
},
true, true,
[]packersdk.Build{&packer.CoreBuild{}},
@ -86,19 +104,33 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/post-processor_inexistent.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: Builds{
&BuildBlock{
PostProcessorsLists: [][]*PostProcessorBlock{
{
{
PType: "inexistant",
},
},
},
},
},
},
true, true,
[]packersdk.Build{},
false,
[]packersdk.Build{&packer.CoreBuild{
PostProcessors: [][]packer.CoreBuildPostProcessor{},
}},
true,
},
{"invalid source",
defaultParser,
parseTestArgs{"testdata/build/invalid_source_reference.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: nil,
},
true, true,
[]packersdk.Build{},
@ -108,21 +140,26 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/named.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Builds: Builds{
&BuildBlock{
Name: "somebuild",
Sources: []SourceRef{
Sources: []SourceUseBlock{
{
Type: "amazon-ebs",
Name: "ubuntu-1604",
SourceRef: SourceRef{
Type: "amazon-ebs",
Name: "ubuntu-1604",
},
},
{
SourceRef: refVBIsoUbuntu1204,
},
refVBIsoUbuntu1204,
},
},
},
},
false, false,
true, true,
[]packersdk.Build{},
true,
},
@ -130,16 +167,22 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/post-processor_onlyexcept.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Sources: map[SourceRef]SourceBlock{
refVBIsoUbuntu1204: {Type: "virtualbox-iso", Name: "ubuntu-1204"},
refAWSEBSUbuntu1604: {Type: "amazon-ebs", Name: "ubuntu-1604"},
},
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
refVBIsoUbuntu1204,
SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604", LocalName: "aws-ubuntu-16.04"},
Sources: []SourceUseBlock{
{
SourceRef: refVBIsoUbuntu1204,
},
{
SourceRef: SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604"},
LocalName: "aws-ubuntu-16.04",
},
},
ProvisionerBlocks: nil,
PostProcessorsLists: [][]*PostProcessorBlock{
@ -248,16 +291,22 @@ func TestParse_build(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/build/provisioner_onlyexcept.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "build"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "build"),
Sources: map[SourceRef]SourceBlock{
refVBIsoUbuntu1204: {Type: "virtualbox-iso", Name: "ubuntu-1204"},
refAWSEBSUbuntu1604: {Type: "amazon-ebs", Name: "ubuntu-1604"},
},
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
refVBIsoUbuntu1204,
SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604", LocalName: "aws-ubuntu-16.04"},
Sources: []SourceUseBlock{
{
SourceRef: refVBIsoUbuntu1204,
},
{
SourceRef: SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604"},
LocalName: "aws-ubuntu-16.04",
},
},
ProvisionerBlocks: []*ProvisionerBlock{
{

View File

@ -15,7 +15,8 @@ func TestParse_datasource(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/datasources/basic.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "datasources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "amazon-ami",
@ -34,7 +35,8 @@ func TestParse_datasource(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/datasources/untyped.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "datasources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
},
true, true,
nil,
@ -44,7 +46,8 @@ func TestParse_datasource(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/datasources/unnamed.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "datasources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
},
true, true,
nil,
@ -54,7 +57,8 @@ func TestParse_datasource(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/datasources/inexistent.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "datasources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "inexistant",
@ -73,7 +77,8 @@ func TestParse_datasource(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/datasources/duplicate.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "datasources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "datasources"),
Datasources: Datasources{
{
Type: "amazon-ami",

View File

@ -21,7 +21,9 @@ import (
type PackerConfig struct {
Packer struct {
VersionConstraints []VersionConstraint
RequiredPlugins []*RequiredPlugins
}
// Directory where the config files are defined
Basedir string
@ -50,11 +52,6 @@ type PackerConfig struct {
// Builds is the list of Build blocks defined in the config files.
Builds Builds
builderSchemas packer.BuilderStore
provisionersSchemas packer.ProvisionerStore
postProcessorsSchemas packer.PostProcessorStore
datasourceSchemas packer.DatasourceStore
except []glob.Glob
only []glob.Glob
@ -259,7 +256,7 @@ func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics
continue
}
datasource, startDiags := cfg.startDatasource(cfg.datasourceSchemas, ref)
datasource, startDiags := cfg.startDatasource(cfg.parser.PluginConfig.DataSources, ref)
diags = append(diags, startDiags...)
if diags.HasErrors() {
continue
@ -290,7 +287,7 @@ func (cfg *PackerConfig) evaluateDatasources(skipExecution bool) hcl.Diagnostics
// getCoreBuildProvisioners takes a list of provisioner block, starts according
// provisioners and sends parsed HCL2 over to it.
func (cfg *PackerConfig) getCoreBuildProvisioners(source SourceBlock, blocks []*ProvisionerBlock, ectx *hcl.EvalContext) ([]packer.CoreBuildProvisioner, hcl.Diagnostics) {
func (cfg *PackerConfig) getCoreBuildProvisioners(source SourceUseBlock, blocks []*ProvisionerBlock, ectx *hcl.EvalContext) ([]packer.CoreBuildProvisioner, hcl.Diagnostics) {
var diags hcl.Diagnostics
res := []packer.CoreBuildProvisioner{}
for _, pb := range blocks {
@ -333,7 +330,7 @@ func (cfg *PackerConfig) getCoreBuildProvisioners(source SourceBlock, blocks []*
// getCoreBuildProvisioners takes a list of post processor block, starts
// according provisioners and sends parsed HCL2 over to it.
func (cfg *PackerConfig) getCoreBuildPostProcessors(source SourceBlock, blocksList [][]*PostProcessorBlock, ectx *hcl.EvalContext) ([][]packer.CoreBuildPostProcessor, hcl.Diagnostics) {
func (cfg *PackerConfig) getCoreBuildPostProcessors(source SourceUseBlock, blocksList [][]*PostProcessorBlock, ectx *hcl.EvalContext) ([][]packer.CoreBuildPostProcessor, hcl.Diagnostics) {
var diags hcl.Diagnostics
res := [][]packer.CoreBuildPostProcessor{}
for _, blocks := range blocksList {
@ -387,23 +384,21 @@ func (cfg *PackerConfig) GetBuilds(opts packer.GetBuildsOptions) ([]packersdk.Bu
var diags hcl.Diagnostics
for _, build := range cfg.Builds {
for _, from := range build.Sources {
src, found := cfg.Sources[from.Ref()]
for _, srcUsage := range build.Sources {
src, found := cfg.Sources[srcUsage.SourceRef]
if !found {
diags = append(diags, &hcl.Diagnostic{
Summary: "Unknown " + sourceLabel + " " + from.String(),
Summary: "Unknown " + sourceLabel + " " + srcUsage.String(),
Subject: build.HCL2Ref.DefRange.Ptr(),
Severity: hcl.DiagError,
Detail: fmt.Sprintf("Known: %v", cfg.Sources),
})
continue
}
src.addition = from.addition
src.LocalName = from.LocalName
pcb := &packer.CoreBuild{
BuildName: build.Name,
Type: src.String(),
Type: srcUsage.String(),
}
// Apply the -only and -except command-line options to exclude matching builds.
@ -446,7 +441,7 @@ func (cfg *PackerConfig) GetBuilds(opts packer.GetBuildsOptions) ([]packersdk.Bu
}
}
builder, moreDiags, generatedVars := cfg.startBuilder(src, cfg.EvalContext(nil), opts)
builder, moreDiags, generatedVars := cfg.startBuilder(srcUsage, cfg.EvalContext(nil), opts)
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
continue
@ -464,16 +459,16 @@ func (cfg *PackerConfig) GetBuilds(opts packer.GetBuildsOptions) ([]packersdk.Bu
unknownBuildValues["name"] = cty.StringVal(build.Name)
variables := map[string]cty.Value{
sourcesAccessor: cty.ObjectVal(src.ctyValues()),
sourcesAccessor: cty.ObjectVal(srcUsage.ctyValues()),
buildAccessor: cty.ObjectVal(unknownBuildValues),
}
provisioners, moreDiags := cfg.getCoreBuildProvisioners(src, build.ProvisionerBlocks, cfg.EvalContext(variables))
provisioners, moreDiags := cfg.getCoreBuildProvisioners(srcUsage, build.ProvisionerBlocks, cfg.EvalContext(variables))
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
continue
}
pps, moreDiags := cfg.getCoreBuildPostProcessors(src, build.PostProcessorsLists, cfg.EvalContext(variables))
pps, moreDiags := cfg.getCoreBuildPostProcessors(srcUsage, build.PostProcessorsLists, cfg.EvalContext(variables))
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
continue
@ -575,7 +570,7 @@ func (p *PackerConfig) printBuilds() string {
fmt.Fprintf(out, "\n <no source>\n")
}
for _, source := range build.Sources {
fmt.Fprintf(out, "\n %s\n", source)
fmt.Fprintf(out, "\n %s\n", source.String())
}
fmt.Fprintf(out, "\n provisioners:\n\n")
if len(build.ProvisionerBlocks) == 0 {

View File

@ -3,8 +3,11 @@ package hcl2template
import (
"testing"
"github.com/hashicorp/go-version"
packersdk "github.com/hashicorp/packer-plugin-sdk/packer"
"github.com/hashicorp/packer/hcl2template/addrs"
. "github.com/hashicorp/packer/hcl2template/internal"
hcl2template "github.com/hashicorp/packer/hcl2template/internal"
"github.com/hashicorp/packer/packer"
"github.com/zclconf/go-cty/cty"
)
@ -12,6 +15,7 @@ import (
var (
refVBIsoUbuntu1204 = SourceRef{Type: "virtualbox-iso", Name: "ubuntu-1204"}
refAWSEBSUbuntu1604 = SourceRef{Type: "amazon-ebs", Name: "ubuntu-1604"}
refAWSV3MyImage = SourceRef{Type: "amazon-v3-ebs", Name: "my-image"}
pTrue = pointerToBool(true)
)
@ -23,7 +27,20 @@ func TestParser_complete(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/complete", nil, nil},
&PackerConfig{
Basedir: "testdata/complete",
Packer: struct {
VersionConstraints []VersionConstraint
RequiredPlugins []*RequiredPlugins
}{
VersionConstraints: []VersionConstraint{
{
Required: mustVersionConstraints(version.NewConstraint(">= v1")),
},
},
RequiredPlugins: nil,
},
CorePackerVersionString: lockedVersion,
Basedir: "testdata/complete",
InputVariables: Variables{
"foo": &Variable{
Name: "foo",
@ -125,9 +142,13 @@ func TestParser_complete(t *testing.T) {
},
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
refVBIsoUbuntu1204,
refAWSEBSUbuntu1604,
Sources: []SourceUseBlock{
{
SourceRef: refVBIsoUbuntu1204,
},
{
SourceRef: refAWSEBSUbuntu1604,
},
},
ProvisionerBlocks: []*ProvisionerBlock{
{
@ -200,7 +221,7 @@ func TestParser_complete(t *testing.T) {
PType: "amazon-import",
PName: "something",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
PostProcessor: basicMockPostProcessorDynamicTags,
},
KeepInputArtifact: pTrue,
},
@ -257,7 +278,20 @@ func TestParser_complete(t *testing.T) {
Int: 42,
Tags: []MockTag{},
},
NestedSlice: []NestedMockConfig{},
Nested: hcl2template.NestedMockConfig{
Tags: []hcl2template.MockTag{
{Key: "Component", Value: "user-service"},
{Key: "Environment", Value: "production"},
},
},
NestedSlice: []NestedMockConfig{
hcl2template.NestedMockConfig{
Tags: []hcl2template.MockTag{
{Key: "Component", Value: "user-service"},
{Key: "Environment", Value: "production"},
},
},
},
},
},
Provisioners: []packer.CoreBuildProvisioner{
@ -281,7 +315,7 @@ func TestParser_complete(t *testing.T) {
PType: "amazon-import",
PName: "something",
PostProcessor: &HCL2PostProcessor{
PostProcessor: basicMockPostProcessor,
PostProcessor: basicMockPostProcessorDynamicTags,
},
KeepInputArtifact: pTrue,
},
@ -357,6 +391,218 @@ func TestParser_ValidateFilterOption(t *testing.T) {
}
}
// TestParser_no_init checks parsing of templates that declare a
// `packer { required_plugins { ... } }` block without `packer init` having
// run: the block must be decoded into cfg.Packer.RequiredPlugins (covering
// the shorthand string form and the {version, source} object form), and a
// duplicate plugin accessor must fail parsing.
func TestParser_no_init(t *testing.T) {
	defaultParser := getBasicParser()

	tests := []parseTest{
		{"working build with imports",
			defaultParser,
			parseTestArgs{"testdata/init/imports", nil, nil},
			&PackerConfig{
				Packer: struct {
					VersionConstraints []VersionConstraint
					RequiredPlugins    []*RequiredPlugins
				}{
					VersionConstraints: []VersionConstraint{
						{
							Required: mustVersionConstraints(version.NewConstraint(">= v1")),
						},
					},
					// One entry per attribute of the required_plugins block;
					// each exercises a different source-address spelling.
					RequiredPlugins: []*RequiredPlugins{
						{
							RequiredPlugins: map[string]*RequiredPlugin{
								// Shorthand form: source derived from the name.
								"amazon": {
									Name:   "amazon",
									Source: "",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "hashicorp",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v0")),
									},
								},
								"amazon-v1": {
									Name:   "amazon-v1",
									Source: "amazon",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "hashicorp",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v1")),
									},
								},
								"amazon-v2": {
									Name:   "amazon-v2",
									Source: "amazon",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "hashicorp",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v2")),
									},
								},
								"amazon-v3": {
									Name:   "amazon-v3",
									Source: "hashicorp/amazon",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "hashicorp",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v3")),
									},
								},
								// Non-default namespace.
								"amazon-v3-azr": {
									Name:   "amazon-v3-azr",
									Source: "azr/amazon",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "azr",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v3")),
									},
								},
								// Fully-qualified source address.
								"amazon-v4": {
									Name:   "amazon-v4",
									Source: "github.com/hashicorp/amazon",
									Type: &addrs.Plugin{
										Type:      "amazon",
										Namespace: "hashicorp",
										Hostname:  "github.com",
									},
									Requirement: VersionConstraint{
										Required: mustVersionConstraints(version.NewConstraint(">= v4")),
									},
								},
							},
						},
					},
				},
				CorePackerVersionString: lockedVersion,
				Basedir:                 "testdata/init/imports",
				InputVariables: Variables{
					"foo": &Variable{
						Name:   "foo",
						Values: []VariableAssignment{{From: "default", Value: cty.StringVal("value")}},
						Type:   cty.String,
					},
					"image_id": &Variable{
						Name:   "image_id",
						Values: []VariableAssignment{{From: "default", Value: cty.StringVal("image-id-default")}},
						Type:   cty.String,
					},
					"port": &Variable{
						Name:   "port",
						Values: []VariableAssignment{{From: "default", Value: cty.NumberIntVal(42)}},
						Type:   cty.Number,
					},
					"availability_zone_names": &Variable{
						Name: "availability_zone_names",
						Values: []VariableAssignment{{
							From: "default",
							Value: cty.ListVal([]cty.Value{
								cty.StringVal("A"),
								cty.StringVal("B"),
								cty.StringVal("C"),
							}),
						}},
						Type: cty.List(cty.String),
					},
				},
				Sources: map[SourceRef]SourceBlock{
					refVBIsoUbuntu1204: {Type: "virtualbox-iso", Name: "ubuntu-1204"},
					refAWSV3MyImage:    {Type: "amazon-v3-ebs", Name: "my-image"},
				},
				Builds: Builds{
					&BuildBlock{
						Sources: []SourceUseBlock{
							{
								SourceRef: refVBIsoUbuntu1204,
							},
							{
								SourceRef: refAWSV3MyImage,
							},
						},
						ProvisionerBlocks: []*ProvisionerBlock{
							{
								PType: "shell",
								PName: "provisioner that does something",
							},
							{
								PType: "file",
							},
						},
						PostProcessorsLists: [][]*PostProcessorBlock{
							{
								{
									PType:             "amazon-import",
									PName:             "something",
									KeepInputArtifact: pTrue,
								},
							},
							{
								{
									PType: "amazon-import",
								},
							},
							{
								{
									PType: "amazon-import",
									PName: "first-nested-post-processor",
								},
								{
									PType: "amazon-import",
									PName: "second-nested-post-processor",
								},
							},
							{
								{
									PType: "amazon-import",
									PName: "third-nested-post-processor",
								},
								{
									PType: "amazon-import",
									PName: "fourth-nested-post-processor",
								},
							},
						},
					},
				},
			},
			false, false,
			[]packersdk.Build{},
			false,
		},
		// Two plugins sharing the same local name (accessor) must be a
		// parse-time error; no config is expected back.
		{"duplicate required plugin accessor fails",
			defaultParser,
			parseTestArgs{"testdata/init/duplicate_required_plugins", nil, nil},
			nil,
			true, true,
			[]packersdk.Build{},
			false,
		},
	}
	testParse_only_Parse(t, tests)
}
// pointerToBool returns a pointer to a copy of b, so bool literals can be
// used where a *bool is expected in test fixtures.
func pointerToBool(b bool) *bool {
	v := b
	return &v
}
// mustVersionConstraints unwraps the (Constraints, error) pair returned by
// version.NewConstraint, panicking on error, so constraints can be built
// inline in table-driven test fixtures.
func mustVersionConstraints(vs version.Constraints, err error) version.Constraints {
	if err != nil {
		panic(err)
	}
	return vs
}

View File

@ -0,0 +1,242 @@
package hcl2template
import (
"fmt"
"github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/packer/hcl2template/addrs"
"github.com/zclconf/go-cty/cty"
)
// decodeRequiredPluginsBlock scans a parsed HCL file for
// `packer { required_plugins { ... } }` blocks and appends every declared
// plugin requirement to cfg.Packer.RequiredPlugins, returning any
// diagnostics produced while decoding.
func (cfg *PackerConfig) decodeRequiredPluginsBlock(f *hcl.File) hcl.Diagnostics {
	var diags hcl.Diagnostics

	content, moreDiags := f.Body.Content(configSchema)
	diags = append(diags, moreDiags...)

	for _, block := range content.Blocks {
		if block.Type != packerLabel {
			continue
		}
		inner, innerDiags := block.Body.Content(packerBlockSchema)
		diags = append(diags, innerDiags...)

		// "packer_version" is intentionally skipped here:
		// sniffCoreVersionRequirements has already dealt with it.
		for _, b := range inner.Blocks {
			if b.Type != "required_plugins" {
				continue
			}
			reqs, reqsDiags := decodeRequiredPluginsBlock(b)
			diags = append(diags, reqsDiags...)
			cfg.Packer.RequiredPlugins = append(cfg.Packer.RequiredPlugins, reqs)
		}
	}
	return diags
}
// decodeImplicitRequiredPluginsBlocks reads common configuration blocks to
// guess plugins that are used without being declared in a required_plugins
// block; such plugins are "implicitly used". Currently a stub: source blocks
// are recognized but not yet turned into requirements.
func (cfg *PackerConfig) decodeImplicitRequiredPluginsBlocks(f *hcl.File) hcl.Diagnostics {
	var diags hcl.Diagnostics

	content, moreDiags := f.Body.Content(configSchema)
	diags = append(diags, moreDiags...)

	for _, block := range content.Blocks {
		if block.Type == sourceLabel {
			// TODO: derive an implicit plugin requirement from the
			// source block's type.
		}
	}
	return diags
}
// RequiredPlugin represents a declaration of a dependency on a particular
// Plugin version or source.
type RequiredPlugin struct {
	// Name is the plugin's local name (the attribute key in the
	// required_plugins block), e.g. "amazon".
	Name string
	// Source is the user-provided source string, e.g. "azr/amazon"; empty
	// when the plugin was declared with only a version constraint string.
	Source string
	// Type is the parsed plugin address (hostname/namespace/type).
	Type *addrs.Plugin
	// Requirement is the version constraint declared for this plugin.
	Requirement VersionConstraint
	// DeclRange records where the requirement was declared, for diagnostics.
	DeclRange hcl.Range
}

// RequiredPlugins groups the plugin requirements declared in a single
// required_plugins block.
type RequiredPlugins struct {
	// RequiredPlugins maps each plugin's local name to its requirement.
	RequiredPlugins map[string]*RequiredPlugin
	// DeclRange is the block's definition range, for diagnostics.
	DeclRange hcl.Range
}
// decodeRequiredPluginsBlock decodes the attributes of one required_plugins
// block into a RequiredPlugins value. Each attribute declares a plugin and
// accepts two forms:
//
//	amazon = ">= 1.2.0"                                   // version only
//	amazon = { version = ">= 1.2.0", source = "azr/aws" } // version + source
//
// Malformed entries produce error diagnostics and are not registered in the
// result map.
func decodeRequiredPluginsBlock(block *hcl.Block) (*RequiredPlugins, hcl.Diagnostics) {
	attrs, diags := block.Body.JustAttributes()
	ret := &RequiredPlugins{
		RequiredPlugins: make(map[string]*RequiredPlugin),
		DeclRange:       block.DefRange,
	}

	for name, attr := range attrs {
		// Note: err is an hcl.Diagnostics here, not a plain error.
		expr, err := attr.Expr.Value(nil)
		if err != nil {
			diags = append(diags, err...)
		}

		// Plugin local names must already be in their normalized form.
		nameDiags := checkPluginNameNormalized(name, attr.Expr.Range())
		diags = append(diags, nameDiags...)

		rp := &RequiredPlugin{
			Name:      name,
			DeclRange: attr.Expr.Range(),
		}

		switch {
		case expr.Type().IsPrimitiveType():
			// Shorthand form: the value is just a version constraint string
			// and the source address is derived from the attribute name.
			vc, reqDiags := decodeVersionConstraint(attr)
			diags = append(diags, reqDiags...)
			rp.Requirement = vc
			rp.Type, err = addrs.ParsePluginSourceString(name)
			if err != nil {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid plugin type",
					Detail:   fmt.Sprintf("Invalid plugin type %q: %s", name, err),
				})
			}

		case expr.Type().IsObjectType():
			// Long form: an object with mandatory "version" and "source"
			// attributes and nothing else.
			if !expr.Type().HasAttribute("version") {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "No version constraint was set",
					Detail:   "The version field must be specified as a string. Ex: `version = \">= 1.2.0, < 2.0.0\"`. See https://www.packer.io/docs/templates/hcl_templates/blocks/packer#version-constraints for docs",
					Subject:  attr.Expr.Range().Ptr(),
				})
				continue
			}

			vc := VersionConstraint{
				DeclRange: attr.Range,
			}
			constraint := expr.GetAttr("version")
			if !constraint.Type().Equals(cty.String) || constraint.IsNull() {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid version constraint",
					Detail:   "Version must be specified as a string. See https://www.packer.io/docs/templates/hcl_templates/blocks/packer#version-constraint-syntax for docs.",
					Subject:  attr.Expr.Range().Ptr(),
				})
				continue
			}

			constraintStr := constraint.AsString()
			constraints, err := version.NewConstraint(constraintStr)
			if err != nil {
				// NewConstraint doesn't return user-friendly errors, so we'll just
				// ignore the provided error and produce our own generic one.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid version constraint",
					Detail:   "This string does not use correct version constraint syntax. See https://www.packer.io/docs/templates/hcl_templates/blocks/packer#version-constraint-syntax for docs.",
					Subject:  attr.Expr.Range().Ptr(),
				})
				continue
			}
			vc.Required = constraints
			rp.Requirement = vc

			if !expr.Type().HasAttribute("source") {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "No source was set",
					Detail:   "The source field must be specified as a string. Ex: `source = \"coolcloud\"`. See https://www.packer.io/docs/templates/hcl_templates/blocks/packer#specifying-plugin-requirements for docs",
					Subject:  attr.Expr.Range().Ptr(),
				})
				continue
			}

			source := expr.GetAttr("source")
			if !source.Type().Equals(cty.String) || source.IsNull() {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid source",
					Detail:   "Source must be specified as a string. For example: " + `source = "coolcloud"`,
					Subject:  attr.Expr.Range().Ptr(),
				})
				continue
			}

			rp.Source = source.AsString()
			p, sourceDiags := addrs.ParsePluginSourceString(rp.Source)
			if sourceDiags.HasErrors() {
				// Anchor any source-parsing diagnostic that lacks a subject
				// to this attribute so the user can locate the error.
				for _, diag := range sourceDiags {
					if diag.Subject == nil {
						diag.Subject = attr.Expr.Range().Ptr()
					}
				}
				diags = append(diags, sourceDiags...)
			} else {
				rp.Type = p
			}

			// Reject any extra attribute beyond "version" and "source".
			attrTypes := expr.Type().AttributeTypes()
			for name := range attrTypes {
				if name == "version" || name == "source" {
					continue
				}
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid required_plugins object",
					Detail:   `required_plugins objects can only contain "version" and "source" attributes.`,
					Subject:  attr.Expr.Range().Ptr(),
				})
				break
			}

		default:
			// should not happen
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid required_plugins syntax",
				Detail:   "required_plugins entries must be objects.",
				Subject:  attr.Expr.Range().Ptr(),
			})
			// Do not register the malformed, empty entry.
			continue
		}

		ret.RequiredPlugins[rp.Name] = rp
	}

	return ret, diags
}
// checkPluginNameNormalized verifies that the given plugin local name is
// already in its normalized form, returning an error diagnostic when it is
// invalid or not normalized.
func checkPluginNameNormalized(name string, declrange hcl.Range) hcl.Diagnostics {
	var diags hcl.Diagnostics

	ok, err := addrs.IsPluginPartNormalized(name)
	if err != nil {
		return append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid plugin local name",
			Detail:   fmt.Sprintf("%s is an invalid plugin local name: %s", name, err),
			Subject:  &declrange,
		})
	}
	if ok {
		return diags
	}

	// ParsePluginPart cannot fail at this point: any parse error would have
	// been reported by IsPluginPartNormalized above.
	normalizedPlugin, _ := addrs.ParsePluginPart(name)
	return append(diags, &hcl.Diagnostic{
		Severity: hcl.DiagError,
		Summary:  "Invalid plugin local name",
		Detail:   fmt.Sprintf("Plugin names must be normalized. Replace %q with %q to fix this error.", name, normalizedPlugin),
		Subject:  &declrange,
	})
}

View File

@ -12,7 +12,8 @@ import (
"github.com/zclconf/go-cty/cty"
)
// SourceBlock references an HCL 'source' block.
// SourceBlock references an HCL 'source' block to be used in a build for
// example.
type SourceBlock struct {
// Type of source; ex: virtualbox-iso
Type string
@ -21,27 +22,41 @@ type SourceBlock struct {
block *hcl.Block
// addition will be merged into block to allow user to override builder settings
// per build.source block.
addition hcl.Body
// LocalName can be set in a singular source block from a build block, it
// allows to give a special name to a build in the logs.
LocalName string
}
func (b *SourceBlock) name() string {
// SourceUseBlock is a SourceBlock 'usage' from a config stand point.
// For example when one uses `build.sources = ["..."]` or
// `build.source "..." {...}`.
type SourceUseBlock struct {
// reference to an actual source block definition, or SourceBlock.
SourceRef
// LocalName can be set in a singular source block from a build block, it
// allows to give a special name to a build in the logs.
LocalName string
// Rest of the body, in case the build.source block has more specific
// content
// Body can be expanded by a dynamic tag.
Body hcl.Body
}
func (b *SourceUseBlock) name() string {
if b.LocalName != "" {
return b.LocalName
}
return b.Name
}
func (b *SourceBlock) String() string {
func (b *SourceUseBlock) String() string {
return fmt.Sprintf("%s.%s", b.Type, b.name())
}
// EvalContext adds the values of the source to the passed eval context.
func (b *SourceBlock) ctyValues() map[string]cty.Value {
func (b *SourceUseBlock) ctyValues() map[string]cty.Value {
return map[string]cty.Value{
"type": cty.StringVal(b.Type),
"name": cty.StringVal(b.name()),
@ -54,19 +69,20 @@ func (b *SourceBlock) ctyValues() map[string]cty.Value {
// name = "local_name"
// }
// }
func (p *Parser) decodeBuildSource(block *hcl.Block) (SourceRef, hcl.Diagnostics) {
func (p *Parser) decodeBuildSource(block *hcl.Block) (SourceUseBlock, hcl.Diagnostics) {
ref := sourceRefFromString(block.Labels[0])
out := SourceUseBlock{SourceRef: ref}
var b struct {
Name string `hcl:"name,optional"`
Rest hcl.Body `hcl:",remain"`
}
diags := gohcl.DecodeBody(block.Body, nil, &b)
if diags.HasErrors() {
return ref, diags
return out, diags
}
ref.addition = b.Rest
ref.LocalName = b.Name
return ref, nil
out.LocalName = b.Name
out.Body = b.Rest
return out, nil
}
func (p *Parser) decodeSource(block *hcl.Block) (SourceBlock, hcl.Diagnostics) {
@ -77,41 +93,26 @@ func (p *Parser) decodeSource(block *hcl.Block) (SourceBlock, hcl.Diagnostics) {
}
var diags hcl.Diagnostics
if !p.BuilderSchemas.Has(source.Type) {
diags = append(diags, &hcl.Diagnostic{
Summary: "Unknown " + buildSourceLabel + " type " + source.Type,
Subject: block.LabelRanges[0].Ptr(),
Detail: fmt.Sprintf("known builders: %v", p.BuilderSchemas.List()),
Severity: hcl.DiagError,
})
return source, diags
}
return source, diags
}
func (cfg *PackerConfig) startBuilder(source SourceBlock, ectx *hcl.EvalContext, opts packer.GetBuildsOptions) (packersdk.Builder, hcl.Diagnostics, []string) {
func (cfg *PackerConfig) startBuilder(source SourceUseBlock, ectx *hcl.EvalContext, opts packer.GetBuildsOptions) (packersdk.Builder, hcl.Diagnostics, []string) {
var diags hcl.Diagnostics
builder, err := cfg.builderSchemas.Start(source.Type)
builder, err := cfg.parser.PluginConfig.Builders.Start(source.Type)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Summary: "Failed to load " + sourceLabel + " type",
Detail: err.Error(),
Subject: &source.block.LabelRanges[0],
})
return builder, diags, nil
}
body := source.block.Body
if source.addition != nil {
body = hcl.MergeBodies([]hcl.Body{source.block.Body, source.addition})
}
body := source.Body
decoded, moreDiags := decodeHCL2Spec(body, ectx, builder)
diags = append(diags, moreDiags...)
if moreDiags.HasErrors() {
return nil, diags, nil
return builder, diags, nil
}
// In case of cty.Unknown values, this will write a equivalent placeholder of the same type
@ -131,13 +132,13 @@ func (cfg *PackerConfig) startBuilder(source SourceBlock, ectx *hcl.EvalContext,
builderVars["packer_on_error"] = opts.OnError
generatedVars, warning, err := builder.Prepare(builderVars, decoded)
moreDiags = warningErrorsToDiags(source.block, warning, err)
moreDiags = warningErrorsToDiags(cfg.Sources[source.SourceRef].block, warning, err)
diags = append(diags, moreDiags...)
return builder, diags, generatedVars
}
// These variables will populate the PackerConfig inside of the builders.
func (source *SourceBlock) builderVariables() map[string]string {
func (source *SourceUseBlock) builderVariables() map[string]string {
return map[string]string{
"packer_build_name": source.Name,
"packer_builder_type": source.Type,
@ -151,25 +152,15 @@ func (source *SourceBlock) Ref() SourceRef {
}
}
// SourceRef is a nice way to put `virtualbox-iso.source_name`
type SourceRef struct {
// Type of the source, for example `virtualbox-iso`
Type string
// Name of the source, for example `source_name`
Name string
// The content of this body will be merged into a new block to allow to
// override builder settings per build section.
addition hcl.Body
// LocalName can be set in a singular source block from a build block, it
// allows to give a special name to a build in the logs.
LocalName string
}
// the 'addition' field makes of ref a different entry in the sources map, so
// Ref is here to make sure only one is returned.
func (r *SourceRef) Ref() SourceRef {
return SourceRef{
Type: r.Type,
Name: r.Name,
}
// No other field should be added to the SourceRef because we used that
// struct as a map accessor in many places.
}
// NoSource is the zero value of sourceRef, representing the absense of an

View File

@ -15,7 +15,8 @@ func TestParse_source(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/sources/basic.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "sources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
Sources: map[SourceRef]SourceBlock{
{
Type: "virtualbox-iso",
@ -34,7 +35,8 @@ func TestParse_source(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/sources/untyped.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "sources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
},
true, true,
nil,
@ -44,17 +46,45 @@ func TestParse_source(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/sources/unnamed.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "sources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
},
true, true,
nil,
false,
},
{"inexistent source",
{"unused source with unknown type fails",
defaultParser,
parseTestArgs{"testdata/sources/inexistent.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "sources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
Sources: map[SourceRef]SourceBlock{
{Type: "inexistant", Name: "ubuntu-1204"}: {Type: "inexistant", Name: "ubuntu-1204"},
},
},
false, false,
[]packersdk.Build{},
false,
},
{"used source with unknown type fails",
defaultParser,
parseTestArgs{"testdata/sources/inexistent_used.pkr.hcl", nil, nil},
&PackerConfig{
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
Sources: map[SourceRef]SourceBlock{
{Type: "inexistant", Name: "ubuntu-1204"}: {Type: "inexistant", Name: "ubuntu-1204"},
},
Builds: Builds{
&BuildBlock{
Sources: []SourceUseBlock{
{
SourceRef: SourceRef{Type: "inexistant", Name: "ubuntu-1204"},
},
},
},
},
},
true, true,
nil,
@ -64,7 +94,8 @@ func TestParse_source(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/sources/duplicate.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "sources"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "sources"),
Sources: map[SourceRef]SourceBlock{
{
Type: "virtualbox-iso",

View File

@ -23,7 +23,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/basic.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"image_name": &Variable{
Name: "image_name",
@ -108,7 +109,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/duplicate_variable.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"boolean_value": &Variable{
Name: "boolean_value",
@ -128,7 +130,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/duplicate_variables.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"boolean_value": &Variable{
Name: "boolean_value",
@ -148,7 +151,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/duplicate_locals", nil, nil},
&PackerConfig{
Basedir: "testdata/variables/duplicate_locals",
CorePackerVersionString: lockedVersion,
Basedir: "testdata/variables/duplicate_locals",
LocalVariables: Variables{
"sensible": &Variable{
Values: []VariableAssignment{
@ -170,7 +174,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/invalid_default.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"broken_type": &Variable{
Name: "broken_type",
@ -191,7 +196,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/unknown_key.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"broken_variable": &Variable{
Name: "broken_variable",
@ -209,7 +215,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/unset_used_string_variable.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"foo": &Variable{
Name: "foo",
@ -226,7 +233,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/unset_unused_string_variable.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"foo": &Variable{
Name: "foo",
@ -241,8 +249,10 @@ func TestParse_variables(t *testing.T) {
},
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
{Type: "null", Name: "null-builder"},
Sources: []SourceUseBlock{
{
SourceRef: SourceRef{Type: "null", Name: "null-builder"},
},
},
},
},
@ -264,7 +274,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/complicated", nil, nil},
&PackerConfig{
Basedir: "testdata/variables/complicated",
CorePackerVersionString: lockedVersion,
Basedir: "testdata/variables/complicated",
InputVariables: Variables{
"name_prefix": &Variable{
Name: "name_prefix",
@ -319,8 +330,9 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/recursive_locals.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
LocalVariables: Variables{},
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
LocalVariables: Variables{},
},
true, true,
[]packersdk.Build{},
@ -331,7 +343,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/foo-string.variable.pkr.hcl", nil, []string{"testdata/variables/set-foo-too-wee.hcl"}},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"foo": &Variable{
Name: "foo",
@ -352,7 +365,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/empty.pkr.hcl", nil, []string{"testdata/variables/set-foo-too-wee.hcl"}},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
},
true, false,
[]packersdk.Build{},
@ -363,7 +377,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/provisioner_variable_decoding.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables"),
InputVariables: Variables{
"max_retries": &Variable{
Name: "max_retries",
@ -384,8 +399,10 @@ func TestParse_variables(t *testing.T) {
},
Builds: Builds{
&BuildBlock{
Sources: []SourceRef{
{Type: "null", Name: "null-builder"},
Sources: []SourceUseBlock{
{
SourceRef: SourceRef{Type: "null", Name: "null-builder"},
},
},
ProvisionerBlocks: []*ProvisionerBlock{
{
@ -449,7 +466,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/validation/valid.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables", "validation"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables", "validation"),
InputVariables: Variables{
"image_id": &Variable{
Values: []VariableAssignment{
@ -474,7 +492,8 @@ func TestParse_variables(t *testing.T) {
defaultParser,
parseTestArgs{"testdata/variables/validation/invalid_default.pkr.hcl", nil, nil},
&PackerConfig{
Basedir: filepath.Join("testdata", "variables", "validation"),
CorePackerVersionString: lockedVersion,
Basedir: filepath.Join("testdata", "variables", "validation"),
InputVariables: Variables{
"image_id": &Variable{
Values: []VariableAssignment{{"default", cty.StringVal("potato"), nil}},

35
main.go
View File

@ -22,7 +22,6 @@ import (
"github.com/hashicorp/packer-plugin-sdk/tmp"
"github.com/hashicorp/packer/command"
"github.com/hashicorp/packer/packer"
"github.com/hashicorp/packer/packer/plugin"
"github.com/hashicorp/packer/version"
"github.com/mitchellh/cli"
"github.com/mitchellh/panicwrap"
@ -174,13 +173,13 @@ func wrappedMain() int {
fmt.Fprintf(os.Stderr, "Error preparing cache directory: \n\n%s\n", err)
return 1
}
log.Printf("Setting cache directory: %s", cacheDir)
log.Printf("[INFO] Setting cache directory: %s", cacheDir)
// Determine if we're in machine-readable mode by mucking around with
// the arguments...
args, machineReadable := extractMachineReadable(os.Args[1:])
defer plugin.CleanupClients()
defer packer.CleanupClients()
var ui packersdk.Ui
if machineReadable {
@ -225,12 +224,8 @@ func wrappedMain() int {
CommandMeta = &command.Meta{
CoreConfig: &packer.CoreConfig{
Components: packer.ComponentFinder{
Hook: config.StarHook,
BuilderStore: config.Builders,
ProvisionerStore: config.Provisioners,
PostProcessorStore: config.PostProcessors,
DatasourceStore: config.Datasources,
Hook: config.StarHook,
PluginConfig: config.Plugins,
},
Version: version.Version,
},
@ -302,21 +297,15 @@ func extractMachineReadable(args []string) ([]string, bool) {
func loadConfig() (*config, error) {
var config config
config.Plugins = plugin.Config{
PluginMinPort: 10000,
PluginMaxPort: 25000,
config.Plugins = &packer.PluginConfig{
PluginMinPort: 10000,
PluginMaxPort: 25000,
KnownPluginFolders: packer.PluginFolders("."),
}
if err := config.Plugins.Discover(); err != nil {
return nil, err
}
// Copy plugins to general list
plugins := config.Plugins.GetPlugins()
config.Builders = plugins.Builders
config.Provisioners = plugins.Provisioners
config.PostProcessors = plugins.PostProcessors
config.Datasources = plugins.DataSources
// Finally, try to use an internal plugin. Note that this will not override
// any previously-loaded plugins.
if err := config.discoverInternalComponents(); err != nil {
@ -324,23 +313,19 @@ func loadConfig() (*config, error) {
}
// start by loading from PACKER_CONFIG if available
log.Print("Checking 'PACKER_CONFIG' for a config file path")
configFilePath := os.Getenv("PACKER_CONFIG")
if configFilePath == "" {
var err error
log.Print("'PACKER_CONFIG' not set; checking the default config file path")
log.Print("[INFO] PACKER_CONFIG env var not set; checking the default config file path")
configFilePath, err = pathing.ConfigFile()
if err != nil {
log.Printf("Error detecting default config file path: %s", err)
}
}
if configFilePath == "" {
return &config, nil
}
log.Printf("Attempting to open config file: %s", configFilePath)
log.Printf("[INFO] PACKER_CONFIG env var set; attempting to open config file: %s", configFilePath)
f, err := os.Open(configFilePath)
if err != nil {
if !os.IsNotExist(err) {

View File

@ -21,6 +21,9 @@
{
"pattern": "^/docs/templates/hcl_templates/"
},
{
"pattern": "^/commands/init"
},
{
"pattern": "^/docs/datasources"
},

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"bytes"
@ -11,7 +11,7 @@ import (
func TestClient(t *testing.T) {
process := helperProcess("mock")
c := NewClient(&ClientConfig{Cmd: process})
c := NewClient(&PluginClientConfig{Cmd: process})
defer c.Kill()
// Test that it parses the proper address
@ -42,7 +42,7 @@ func TestClient(t *testing.T) {
}
func TestClientStart_badVersion(t *testing.T) {
config := &ClientConfig{
config := &PluginClientConfig{
Cmd: helperProcess("bad-version"),
StartTimeout: 50 * time.Millisecond,
}
@ -57,7 +57,7 @@ func TestClientStart_badVersion(t *testing.T) {
}
func TestClient_Start_Timeout(t *testing.T) {
config := &ClientConfig{
config := &PluginClientConfig{
Cmd: helperProcess("start-timeout"),
StartTimeout: 50 * time.Millisecond,
}
@ -74,7 +74,7 @@ func TestClient_Start_Timeout(t *testing.T) {
func TestClient_Stderr(t *testing.T) {
stderr := new(bytes.Buffer)
process := helperProcess("stderr")
c := NewClient(&ClientConfig{
c := NewClient(&PluginClientConfig{
Cmd: process,
Stderr: stderr,
})
@ -123,7 +123,7 @@ func TestClient_Stdin(t *testing.T) {
os.Stdin = tf
process := helperProcess("stdin")
c := NewClient(&ClientConfig{Cmd: process})
c := NewClient(&PluginClientConfig{Cmd: process})
defer c.Kill()
_, err = c.Start()

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"context"
@ -10,7 +10,7 @@ import (
type cmdBuilder struct {
builder packersdk.Builder
client *Client
client *PluginClient
}
func (b *cmdBuilder) ConfigSpec() hcldec.ObjectSpec {

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"os/exec"
@ -6,7 +6,7 @@ import (
)
func TestBuilder_NoExist(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: exec.Command("i-should-not-exist")})
c := NewClient(&PluginClientConfig{Cmd: exec.Command("i-should-not-exist")})
defer c.Kill()
_, err := c.Builder()
@ -16,7 +16,7 @@ func TestBuilder_NoExist(t *testing.T) {
}
func TestBuilder_Good(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: helperProcess("builder")})
c := NewClient(&PluginClientConfig{Cmd: helperProcess("builder")})
defer c.Kill()
_, err := c.Builder()

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"log"
@ -10,7 +10,7 @@ import (
type cmdDatasource struct {
d packersdk.Datasource
client *Client
client *PluginClient
}
func (d *cmdDatasource) ConfigSpec() hcldec.ObjectSpec {

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"os/exec"
@ -6,7 +6,7 @@ import (
)
func TestDatasource_NoExist(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: exec.Command("i-should-not-exist")})
c := NewClient(&PluginClientConfig{Cmd: exec.Command("i-should-not-exist")})
defer c.Kill()
_, err := c.Datasource()
@ -16,7 +16,7 @@ func TestDatasource_NoExist(t *testing.T) {
}
func TestDatasource_Good(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: helperProcess("datasource")})
c := NewClient(&PluginClientConfig{Cmd: helperProcess("datasource")})
defer c.Kill()
_, err := c.Datasource()

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"context"
@ -9,7 +9,7 @@ import (
type cmdHook struct {
hook packersdk.Hook
client *Client
client *PluginClient
}
func (c *cmdHook) Run(ctx context.Context, name string, ui packersdk.Ui, comm packersdk.Communicator, data interface{}) error {

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"os/exec"
@ -6,7 +6,7 @@ import (
)
func TestHook_NoExist(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: exec.Command("i-should-not-exist")})
c := NewClient(&PluginClientConfig{Cmd: exec.Command("i-should-not-exist")})
defer c.Kill()
_, err := c.Hook()
@ -16,7 +16,7 @@ func TestHook_NoExist(t *testing.T) {
}
func TestHook_Good(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: helperProcess("hook")})
c := NewClient(&PluginClientConfig{Cmd: helperProcess("hook")})
defer c.Kill()
_, err := c.Hook()

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"context"
@ -10,7 +10,7 @@ import (
type cmdPostProcessor struct {
p packersdk.PostProcessor
client *Client
client *PluginClient
}
func (b *cmdPostProcessor) ConfigSpec() hcldec.ObjectSpec {

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"context"
@ -22,7 +22,7 @@ func (helperPostProcessor) PostProcess(context.Context, packersdk.Ui, packersdk.
}
func TestPostProcessor_NoExist(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: exec.Command("i-should-not-exist")})
c := NewClient(&PluginClientConfig{Cmd: exec.Command("i-should-not-exist")})
defer c.Kill()
_, err := c.PostProcessor()
@ -32,7 +32,7 @@ func TestPostProcessor_NoExist(t *testing.T) {
}
func TestPostProcessor_Good(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: helperProcess("post-processor")})
c := NewClient(&PluginClientConfig{Cmd: helperProcess("post-processor")})
defer c.Kill()
_, err := c.PostProcessor()

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"context"
@ -10,7 +10,7 @@ import (
type cmdProvisioner struct {
p packersdk.Provisioner
client *Client
client *PluginClient
}
func (p *cmdProvisioner) ConfigSpec() hcldec.ObjectSpec {

View File

@ -1,4 +1,4 @@
package plugin
package packer
import (
"os/exec"
@ -6,7 +6,7 @@ import (
)
func TestProvisioner_NoExist(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: exec.Command("i-should-not-exist")})
c := NewClient(&PluginClientConfig{Cmd: exec.Command("i-should-not-exist")})
defer c.Kill()
_, err := c.Provisioner()
@ -16,7 +16,7 @@ func TestProvisioner_NoExist(t *testing.T) {
}
func TestProvisioner_Good(t *testing.T) {
c := NewClient(&ClientConfig{Cmd: helperProcess("provisioner")})
c := NewClient(&PluginClientConfig{Cmd: helperProcess("provisioner")})
defer c.Kill()
_, err := c.Provisioner()

View File

@ -71,32 +71,47 @@ type BuilderStore interface {
Start(name string) (packersdk.Builder, error)
}
type BuilderSet interface {
BuilderStore
Set(name string, starter func() (packersdk.Builder, error))
}
type ProvisionerStore interface {
BasicStore
Start(name string) (packersdk.Provisioner, error)
}
type ProvisionerSet interface {
ProvisionerStore
Set(name string, starter func() (packersdk.Provisioner, error))
}
type PostProcessorStore interface {
BasicStore
Start(name string) (packersdk.PostProcessor, error)
}
type PostProcessorSet interface {
PostProcessorStore
Set(name string, starter func() (packersdk.PostProcessor, error))
}
type DatasourceStore interface {
BasicStore
Start(name string) (packersdk.Datasource, error)
}
type DatasourceSet interface {
DatasourceStore
Set(name string, starter func() (packersdk.Datasource, error))
}
// ComponentFinder is a struct that contains the various function
// pointers necessary to look up components of Packer such as builders,
// commands, etc.
type ComponentFinder struct {
Hook HookFunc
// For HCL2
BuilderStore BuilderStore
ProvisionerStore ProvisionerStore
PostProcessorStore PostProcessorStore
DatasourceStore DatasourceStore
Hook HookFunc
PluginConfig *PluginConfig
}
// NewCore creates a new Core.
@ -168,7 +183,7 @@ func (c *Core) BuildNames(only, except []string) []string {
func (c *Core) generateCoreBuildProvisioner(rawP *template.Provisioner, rawName string) (CoreBuildProvisioner, error) {
// Get the provisioner
cbp := CoreBuildProvisioner{}
provisioner, err := c.components.ProvisionerStore.Start(rawP.Type)
provisioner, err := c.components.PluginConfig.Provisioners.Start(rawP.Type)
if err != nil {
return cbp, fmt.Errorf(
"error initializing provisioner '%s': %s",
@ -287,7 +302,7 @@ func (c *Core) Build(n string) (packersdk.Build, error) {
// the Start command launches the builder plugin of the given type without
// calling Prepare() or passing any build-specific details.
builder, err := c.components.BuilderStore.Start(configBuilder.Type)
builder, err := c.components.PluginConfig.Builders.Start(configBuilder.Type)
if err != nil {
return nil, fmt.Errorf(
"error initializing builder '%s': %s",
@ -347,7 +362,7 @@ func (c *Core) Build(n string) (packersdk.Build, error) {
}
// Get the post-processor
postProcessor, err := c.components.PostProcessorStore.Start(rawP.Type)
postProcessor, err := c.components.PluginConfig.PostProcessors.Start(rawP.Type)
if err != nil {
return nil, fmt.Errorf(
"error initializing post-processor '%s': %s",

View File

@ -800,7 +800,7 @@ func TestCoreBuild_provRetry(t *testing.T) {
b := TestBuilder(t, config, "test")
pString := new(packersdk.MockProvisioner)
pInt := new(packersdk.MockProvisioner)
config.Components.ProvisionerStore = MapOfProvisioner{
config.Components.PluginConfig.Provisioners = MapOfProvisioner{
"test-string": func() (packersdk.Provisioner, error) { return pString, nil },
// backwards compatibility
"test-integer": func() (packersdk.Provisioner, error) { return pInt, nil },

View File

@ -13,6 +13,10 @@ func (mop MapOfProvisioner) Has(provisioner string) bool {
return res
}
func (mop MapOfProvisioner) Set(provisioner string, starter func() (packersdk.Provisioner, error)) {
mop[provisioner] = starter
}
func (mop MapOfProvisioner) Start(provisioner string) (packersdk.Provisioner, error) {
p, found := mop[provisioner]
if !found {
@ -36,6 +40,10 @@ func (mopp MapOfPostProcessor) Has(postProcessor string) bool {
return res
}
func (mopp MapOfPostProcessor) Set(postProcessor string, starter func() (packersdk.PostProcessor, error)) {
mopp[postProcessor] = starter
}
func (mopp MapOfPostProcessor) Start(postProcessor string) (packersdk.PostProcessor, error) {
p, found := mopp[postProcessor]
if !found {
@ -59,6 +67,10 @@ func (mob MapOfBuilder) Has(builder string) bool {
return res
}
func (mob MapOfBuilder) Set(builder string, starter func() (packersdk.Builder, error)) {
mob[builder] = starter
}
func (mob MapOfBuilder) Start(builder string) (packersdk.Builder, error) {
d, found := mob[builder]
if !found {
@ -82,6 +94,10 @@ func (mod MapOfDatasource) Has(dataSource string) bool {
return res
}
func (mod MapOfDatasource) Set(dataSource string, starter func() (packersdk.Datasource, error)) {
mod[dataSource] = starter
}
func (mod MapOfDatasource) Start(dataSource string) (packersdk.Datasource, error) {
d, found := mod[dataSource]
if !found {

View File

@ -0,0 +1,116 @@
package plugingetter
import (
"bytes"
"encoding/hex"
"fmt"
"hash"
"io"
"os"
"strings"
)
// A ChecksumError is returned when a checksum differs
type ChecksumError struct {
Hash hash.Hash
Actual []byte
Expected []byte
File string
}
func (cerr *ChecksumError) Error() string {
if cerr == nil {
return "<nil>"
}
return fmt.Sprintf(
"Checksums (%T) did not match.\nExpected: %s\nGot : %s\n",
cerr.Hash, // ex: *sha256.digest
hex.EncodeToString(cerr.Expected),
hex.EncodeToString(cerr.Actual),
)
}
type Checksum []byte
func (c Checksum) String() string { return hex.EncodeToString(c) }
type FileChecksum struct {
Filename string
Expected Checksum
Checksummer
}
type Checksummer struct {
// Something like md5 or sha256
Type string
// Hash function
hash.Hash
}
func (c *Checksummer) FileExt() string {
return "_" + strings.ToUpper(c.Type) + "SUM"
}
// GetCacheChecksumOfFile will extract the checksum from file `filePath + c.FileExt()`.
// It expects the checksum file to only contains the checksum and nothing else.
func (c *Checksummer) GetCacheChecksumOfFile(filePath string) ([]byte, error) {
checksumFile := filePath + c.FileExt()
f, err := os.Open(checksumFile)
if err != nil {
return nil, err
}
defer f.Close()
return c.ParseChecksum(f)
}
// ParseChecksum expects the checksum reader to only contain the checksum and
// nothing else.
func (c *Checksummer) ParseChecksum(f io.Reader) (Checksum, error) {
res := make([]byte, c.Hash.Size())
_, err := hex.NewDecoder(f).Read(res)
if err == io.EOF {
err = nil
}
return res, err
}
// ChecksumFile compares the expected checksum to the checksum of the file in
// filePath using the hash function.
func (c *Checksummer) ChecksumFile(expected []byte, filePath string) error {
f, err := os.Open(filePath)
if err != nil {
return fmt.Errorf("Checksum: failed to open file for checksum: %s", err)
}
defer f.Close()
err = c.Checksum(expected, f)
if cerr, ok := err.(*ChecksumError); ok {
cerr.File = filePath
}
return err
}
func (c *Checksummer) Sum(f io.Reader) ([]byte, error) {
c.Hash.Reset()
if _, err := io.Copy(c.Hash, f); err != nil {
return nil, fmt.Errorf("Failed to hash: %s", err)
}
return c.Hash.Sum(nil), nil
}
func (c *Checksummer) Checksum(expected []byte, f io.Reader) error {
actual, err := c.Sum(f)
if err != nil {
return err
}
if !bytes.Equal(actual, expected) {
return &ChecksumError{
Hash: c.Hash,
Actual: actual,
Expected: expected,
}
}
return nil
}

View File

@ -0,0 +1,2 @@
// Package plugingetter defines means to download and install plugins.
package plugingetter

View File

@ -0,0 +1,3 @@
// Package github defines a Github getter.
package github

View File

@ -0,0 +1,226 @@
package github
import (
"bufio"
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"path/filepath"
"strings"
"github.com/google/go-github/v33/github"
plugingetter "github.com/hashicorp/packer/packer/plugin-getter"
"golang.org/x/oauth2"
)
const (
ghTokenAccessor = "PKR_GITHUB_API_TOKEN"
defaultUserAgent = "packer-plugin-getter"
)
type Getter struct {
Client *github.Client
UserAgent string
}
var _ plugingetter.Getter = &Getter{}
func tranformChecksumStream() func(in io.ReadCloser) (io.ReadCloser, error) {
return func(in io.ReadCloser) (io.ReadCloser, error) {
defer in.Close()
rd := bufio.NewReader(in)
buffer := bytes.NewBufferString("[")
json := json.NewEncoder(buffer)
for i := 0; ; i++ {
line, err := rd.ReadString('\n')
if err != nil {
if err != io.EOF {
return nil, fmt.Errorf(
"Error reading checksum file: %s", err)
}
break
}
parts := strings.Fields(line)
switch len(parts) {
case 2: // nominal case
checksumString, checksumFilename := parts[0], parts[1]
if i > 0 {
_, _ = buffer.WriteString(",")
}
if err := json.Encode(struct {
Checksum string `json:"checksum"`
Filename string `json:"filename"`
}{
Checksum: checksumString,
Filename: checksumFilename,
}); err != nil {
return nil, err
}
}
}
_, _ = buffer.WriteString("]")
return ioutil.NopCloser(buffer), nil
}
}
// transformVersionStream reads a GitHub matching-refs/tags JSON stream and
// transforms it into what Packer expects: a JSON list of Release, with the
// "refs/tags/" prefix stripped from each ref.
func transformVersionStream(in io.ReadCloser) (io.ReadCloser, error) {
	if in == nil {
		return nil, fmt.Errorf("transformVersionStream got nil body")
	}
	defer in.Close()

	var tags []struct {
		Ref string `json:"ref"`
	}
	if err := json.NewDecoder(in).Decode(&tags); err != nil {
		return nil, err
	}

	releases := make([]plugingetter.Release, 0, len(tags))
	for _, tag := range tags {
		releases = append(releases, plugingetter.Release{
			Version: strings.TrimPrefix(tag.Ref, "refs/tags/"),
		})
	}

	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(releases); err != nil {
		return nil, err
	}
	return ioutil.NopCloser(&buf), nil
}
// HostSpecificTokenAuthTransport makes sure the http roundtripper only sets an
// auth token for requests aimed at a specific host.
//
// This helps for example to get release files from Github as Github will
// redirect to s3 which will error if we give it a Github auth token.
type HostSpecificTokenAuthTransport struct {
// Host to TokenSource map
TokenSources map[string]oauth2.TokenSource
// actual RoundTripper, nil means we use the default one from http.
Base http.RoundTripper
}
// RoundTrip authorizes and authenticates the request with an
// access token from Transport's Source.
//
// The token is only attached when req.Host has an entry in t.TokenSources;
// requests to any other host — e.g. the S3 host GitHub redirects release
// downloads to — are passed through untouched.
func (t *HostSpecificTokenAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	source, found := t.TokenSources[req.Host]
	if found {
		// If we return early with an error, close req.Body ourselves: the
		// base RoundTripper, which normally owns it, will never see it.
		reqBodyClosed := false
		if req.Body != nil {
			defer func() {
				if !reqBodyClosed {
					req.Body.Close()
				}
			}()
		}

		if source == nil {
			return nil, errors.New("transport's Source is nil")
		}
		token, err := source.Token()
		if err != nil {
			return nil, err
		}

		token.SetAuthHeader(req)

		// req.Body is assumed to be closed by the base RoundTripper.
		reqBodyClosed = true
	}
	return t.base().RoundTrip(req)
}
// base returns the RoundTripper to delegate to: t.Base when set, otherwise
// the package default transport.
func (t *HostSpecificTokenAuthTransport) base() http.RoundTripper {
	if t.Base == nil {
		return http.DefaultTransport
	}
	return t.Base
}
// Get fetches one of the "releases", "sha256" or "zip" streams for the plugin
// described by opts, lazily creating a GitHub API client on first use. When
// the PKR_GITHUB_API_TOKEN env var is set, the client authenticates with it,
// but only towards api.github.com (see HostSpecificTokenAuthTransport).
// The returned stream has been shaped for the plugin-getter; the caller is
// responsible for closing it.
func (g *Getter) Get(what string, opts plugingetter.GetOptions) (io.ReadCloser, error) {
	ctx := context.TODO()
	if g.Client == nil {
		var tc *http.Client
		if tk := os.Getenv(ghTokenAccessor); tk != "" {
			log.Printf("[DEBUG] github-getter: using %s", ghTokenAccessor)
			ts := oauth2.StaticTokenSource(
				&oauth2.Token{AccessToken: tk},
			)
			tc = &http.Client{
				Transport: &HostSpecificTokenAuthTransport{
					TokenSources: map[string]oauth2.TokenSource{
						"api.github.com": ts,
					},
				},
			}
		}
		g.Client = github.NewClient(tc)
		g.Client.UserAgent = defaultUserAgent
		if g.UserAgent != "" {
			g.Client.UserAgent = g.UserAgent
		}
	}

	var req *http.Request
	var err error
	// By default the response body is returned as-is (the "zip" case).
	transform := func(in io.ReadCloser) (io.ReadCloser, error) {
		return in, nil
	}

	switch what {
	case "releases":
		// Tag list from the API host; rewritten into a JSON list of Release.
		u := filepath.ToSlash("/repos/" + opts.PluginRequirement.Identifier.RealRelativePath() + "/git/matching-refs/tags")
		req, err = g.Client.NewRequest("GET", u, nil)
		transform = transformVersionStream
	case "sha256":
		// something like https://github.com/sylviamoss/packer-plugin-comment/releases/download/v0.2.11/packer-plugin-comment_v0.2.11_x5_SHA256SUMS
		u := filepath.ToSlash("https://github.com/" + opts.PluginRequirement.Identifier.RealRelativePath() + "/releases/download/" + opts.Version() + "/" + opts.PluginRequirement.FilenamePrefix() + opts.Version() + "_SHA256SUMS")
		req, err = g.Client.NewRequest(
			"GET",
			u,
			nil,
		)
		transform = tranformChecksumStream()
	case "zip":
		u := filepath.ToSlash("https://github.com/" + opts.PluginRequirement.Identifier.RealRelativePath() + "/releases/download/" + opts.Version() + "/" + opts.ExpectedZipFilename())
		req, err = g.Client.NewRequest(
			"GET",
			u,
			nil,
		)
	default:
		return nil, fmt.Errorf("%q not implemented", what)
	}
	if err != nil {
		return nil, err
	}
	log.Printf("[DEBUG] github-getter: getting %q", req.URL)
	resp, err := g.Client.BareDo(ctx, req)
	if err != nil {
		// here BareDo will return an err if the request failed or if the
		// status is not considered a valid http status.
		if resp != nil {
			resp.Body.Close()
		}
		return nil, err
	}
	return transform(resp.Body)
}

View File

@ -0,0 +1,622 @@
package plugingetter
import (
"archive/zip"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/hashicorp/go-version"
"github.com/hashicorp/packer-plugin-sdk/tmp"
"github.com/hashicorp/packer/hcl2template/addrs"
)
type Requirements []*Requirement
// Requirement describes a required plugin and how it is installed. Usually a list
// of required plugins is generated from a config file. From it we check what
// is actually installed and what needs to happen to get in the desired state.
type Requirement struct {
// Plugin accessor as defined in the config file.
// For Packer, using :
// required_plugins { amazon = ">= v0" }
// Will set Accessor to `amazon`.
Accessor string
// Something like github.com/hashicorp/packer-plugin-amazon, from the
// previous example.
Identifier *addrs.Plugin
// VersionConstraints as defined by user. Empty ( to be avoided ) means
// highest found version.
VersionConstraints version.Constraints
}
// BinaryInstallationOptions describe the plugin binary the current Packer can
// run: its plugin-API (protocol) version, target platform and how to verify
// its checksum.
type BinaryInstallationOptions struct {
	// APIVersionMajor and APIVersionMinor are the plugin API (protocol)
	// version parts encoded in the binary name — presumably the "5" and "0"
	// of an "_x5.0" segment; TODO confirm against CheckProtocolVersion.
	APIVersionMajor, APIVersionMinor string
	// OS and ARCH usually should be runtime.GOOS and runtime.ARCH, they allow
	// to pick the correct binary.
	OS, ARCH string

	// Ext is ".exe" on windows
	Ext string

	// Checksummers lists the accepted checksum algorithms; at least one
	// matching checksum file is required for a binary to be considered
	// installed (see ListInstallations).
	Checksummers []Checksummer
}
type ListInstallationsOptions struct {
// FromFolders where plugins could be installed. Paths should be absolute for
// safety but can also be relative.
FromFolders []string
BinaryInstallationOptions
}
// FilenamePrefix returns the filename prefix shared by every binary of this
// plugin, ex: "packer-plugin-amazon_".
func (pr Requirement) FilenamePrefix() string {
	prefix := "packer-plugin-" + pr.Identifier.Type
	return prefix + "_"
}
// filenameSuffix returns the platform-dependent filename suffix,
// ex: "_darwin_amd64" or "_windows_amd64.exe".
func (opts BinaryInstallationOptions) filenameSuffix() string {
	suffix := "_" + opts.OS + "_" + opts.ARCH
	return suffix + opts.Ext
}
// ListInstallations lists unique installed versions of plugin Requirement pr
// with opts as a filter.
//
// Installations are sorted by version and one binary per version is returned.
// Last binary detected takes precedence: in the order 'FromFolders' option.
//
// At least one opts.Checksummers must be given for a binary to be even
// considered.
func (pr Requirement) ListInstallations(opts ListInstallationsOptions) (InstallList, error) {
	res := InstallList{}
	filenamePrefix := pr.FilenamePrefix()
	filenameSuffix := opts.filenameSuffix()
	log.Printf("[TRACE] listing potential installations for %q that match %q. %#v", pr.Identifier.ForDisplay(), pr.VersionConstraints, opts)
	for _, knownFolder := range opts.FromFolders {
		glob := filepath.Join(knownFolder, pr.Identifier.Hostname, pr.Identifier.Namespace, pr.Identifier.Type, filenamePrefix+"*"+filenameSuffix)

		matches, err := filepath.Glob(glob)
		if err != nil {
			return nil, fmt.Errorf("ListInstallations: %q failed to list binaries in folder: %v", pr.Identifier.String(), err)
		}
		for _, path := range matches {
			fname := filepath.Base(path)
			if fname == "." {
				continue
			}

			// base name could look like packer-plugin-amazon_v1.2.3_x5.1_darwin_amd64.exe
			versionsStr := strings.TrimPrefix(fname, filenamePrefix)
			versionsStr = strings.TrimSuffix(versionsStr, filenameSuffix)

			// versionsStr now looks like v1.2.3_x5.1
			parts := strings.SplitN(versionsStr, "_", 2)
			if len(parts) < 2 {
				// The "*" in the glob can match a name with no protocol
				// version segment at all; skip it instead of panicking on an
				// out-of-range index.
				log.Printf("found %q with an incorrectly formatted name, ignoring it", path)
				continue
			}
			pluginVersionStr, protocolVersionStr := parts[0], parts[1]
			pv, err := version.NewVersion(pluginVersionStr)
			if err != nil {
				// could not be parsed, ignoring the file
				log.Printf("found %q with an incorrect %q version, ignoring it. %v", path, pluginVersionStr, err)
				continue
			}

			// no constraint means always pass, this will happen for implicit
			// plugin requirements
			if !pr.VersionConstraints.Check(pv) {
				log.Printf("[TRACE] version %q of file %q does not match constraint %q", pluginVersionStr, path, pr.VersionConstraints.String())
				continue
			}

			if err := opts.CheckProtocolVersion(protocolVersionStr); err != nil {
				log.Printf("[NOTICE] binary %s requires protocol version %s that is incompatible "+
					"with this version of Packer. %s", path, protocolVersionStr, err)
				continue
			}

			// A binary is only trusted when at least one configured
			// checksummer finds a cached checksum file and it matches.
			checksumOk := false
			for _, checksummer := range opts.Checksummers {
				cs, err := checksummer.GetCacheChecksumOfFile(path)
				if err != nil {
					log.Printf("[TRACE] GetChecksumOfFile(%q) failed: %v", path, err)
					continue
				}

				if err := checksummer.ChecksumFile(cs, path); err != nil {
					log.Printf("[TRACE] ChecksumFile(%q) failed: %v", path, err)
					continue
				}
				checksumOk = true
				break
			}
			if !checksumOk {
				log.Printf("[TRACE] No checksum found for %q ignoring possibly unsafe binary", path)
				continue
			}

			// InsertSortedUniq keeps the list version-sorted with one binary
			// per version.
			res.InsertSortedUniq(&Installation{
				BinaryPath: path,
				Version:    pluginVersionStr,
			})
		}
	}
	return res, nil
}
// InstallList is a list of installed plugins (binaries) with their versions,
// ListInstallations should be used to get an InstallList.
//
// ListInstallations sorts binaries by version and one binary per version is
// returned. The list is kept sorted by InsertSortedUniq.
type InstallList []*Installation
// String renders the list as a bracketed, comma-separated sequence of
// installations, ex: [{path v1.2.3},{path v1.2.4}].
func (l InstallList) String() string {
	var out strings.Builder
	out.WriteString("[")
	for idx, installation := range l {
		if idx > 0 {
			out.WriteString(",")
		}
		fmt.Fprintf(&out, "%v", *installation)
	}
	out.WriteString("]")
	return out.String()
}
// InsertSortedUniq inserts the installation in the right spot in the list by
// comparing the version lexicographically.
// If the version is already present the new finding is ignored, so the first
// install detected for a version wins.
func (l *InstallList) InsertSortedUniq(install *Installation) {
	// index of the first element whose version sorts >= the new one
	idx := sort.Search(len(*l), func(i int) bool { return (*l)[i].Version >= install.Version })
	if idx < len(*l) && (*l)[idx].Version == install.Version {
		// already detected, let's ignore any new foundings, this way any plugin
		// close to cwd or the packer exec takes precedence; this will be better
		// for plugin development/tests.
		return
	}
	// grow by one, shift the tail right and drop the new element in at idx
	grown := append(*l, nil)
	copy(grown[idx+1:], grown[idx:])
	grown[idx] = install
	*l = grown
}
// Installation describes a plugin installation
type Installation struct {
	// path to where binary is installed, if installed.
	// Ex: /usr/azr/.packer.d/plugins/github.com/hashicorp/packer-plugin-amazon/packer-plugin-amazon_v1.2.3_darwin_amd64
	BinaryPath string
	// Version of this plugin, if installed and versioned. Ex:
	// * v1.2.3 for packer-plugin-amazon_v1.2.3_darwin_x5
	// * empty for packer-plugin-amazon
	Version string
}
// InstallOptions describes the possible options for installing the plugin that
// fits the plugin Requirement.
type InstallOptions struct {
	// Different means to get releases, sha256 and binary files.
	Getters []Getter
	// Any downloaded binary and checksum file will be put in the last possible
	// folder of this list.
	InFolders []string
	// BinaryInstallationOptions describe the protocol version and system the
	// downloaded binary must be compatible with.
	BinaryInstallationOptions
}
// GetOptions is passed to a Getter for each request; it describes which
// plugin, version and file is wanted.
type GetOptions struct {
	PluginRequirement *Requirement
	// BinaryInstallationOptions describe the system Packer runs on.
	BinaryInstallationOptions
	// version of the plugin being fetched; set for checksum and zip requests.
	version *version.Version
	// expectedZipFilename is the zip filename read from the checksum file;
	// only set for "zip" requests.
	expectedZipFilename string
}
// ExpectedZipFilename is the filename of the zip we expect to find, the
// value is known only after parsing the checksum file.
func (gp *GetOptions) ExpectedZipFilename() string {
	return gp.expectedZipFilename
}
// CheckProtocolVersion tells whether a remote protocol version string like
// "x5.0" (or "5.0") is compatible with this Packer: the MAJOR part must match
// exactly and the remote MINOR part must be lower than or equal to ours.
func (binOpts *BinaryInstallationOptions) CheckProtocolVersion(remoteProt string) error {
	remoteProt = strings.TrimPrefix(remoteProt, "x")
	parts := strings.Split(remoteProt, ".")
	if len(parts) < 2 {
		return fmt.Errorf("Invalid remote protocol: %q, expected something like '%s.%s'", remoteProt, binOpts.APIVersionMajor, binOpts.APIVersionMinor)
	}
	vMajor, vMinor := parts[0], parts[1]

	if vMajor != binOpts.APIVersionMajor {
		return fmt.Errorf("Unsupported remote protocol MAJOR version %q. The current MAJOR protocol version is %q."+
			" This version of Packer can only communicate with plugins using that version.", vMajor, binOpts.APIVersionMajor)
	}

	// fast path: identical minor versions are always fine.
	if vMinor == binOpts.APIVersionMinor {
		return nil
	}

	vMinori, err := strconv.Atoi(vMinor)
	if err != nil {
		return err
	}
	apiVersionMinori, err := strconv.Atoi(binOpts.APIVersionMinor)
	if err != nil {
		return err
	}
	if vMinori > apiVersionMinori {
		// fixed: "bellow" typo and missing space between sentences in the
		// user-facing error message.
		return fmt.Errorf("Unsupported remote protocol MINOR version %q. The supported MINOR protocol versions are version %q and below. "+
			"Please upgrade Packer or use an older version of the plugin if possible.", vMinor, binOpts.APIVersionMinor)
	}
	return nil
}
// Version returns the requested plugin version prefixed with "v",
// ex: "v1.2.3".
func (gp *GetOptions) Version() string {
	return "v" + gp.version.String()
}
// A Getter helps get the appropriate files to download a binary.
type Getter interface {
	// Get is called with one of:
	// * 'releases'
	// * a checksum type, ex: 'sha256'
	// * 'zip'
	// and should return the corresponding file contents for the plugin
	// described by opts.
	Get(what string, opts GetOptions) (io.ReadCloser, error)
}
// Release is one version entry of a plugin's 'releases' file.
type Release struct {
	Version string `json:"version"`
}
// ParseReleases decodes a JSON list of releases from f; f is always closed.
func ParseReleases(f io.ReadCloser) ([]Release, error) {
	defer f.Close()
	var releases []Release
	err := json.NewDecoder(f).Decode(&releases)
	return releases, err
}
// ChecksumFileEntry is one line of a checksum file: the checksum of one
// released zip file.
type ChecksumFileEntry struct {
	Filename string `json:"filename"`
	Checksum string `json:"checksum"`
	// parsed out of Filename by init; for
	// packer-plugin-amazon_v1.2.3_x5.0_darwin_amd64.zip these are ".zip",
	// "v1.2.3", "darwin" and "amd64" respectively.
	ext, binVersion, os, arch string
	// protocol version part of Filename, ex: "x5.0".
	protVersion string
}
// Accessors for the fields parsed out of Filename by init.
func (e ChecksumFileEntry) Ext() string         { return e.ext }
func (e ChecksumFileEntry) BinVersion() string  { return e.binVersion }
func (e ChecksumFileEntry) ProtVersion() string { return e.protVersion }
func (e ChecksumFileEntry) Os() string          { return e.os }
func (e ChecksumFileEntry) Arch() string        { return e.arch }
// a file inside will look like so:
//
//	packer-plugin-comment_v0.2.12_x5.0_freebsd_amd64.zip
//
// init parses such a filename into its version, protocol-version, os and
// arch components.
func (e *ChecksumFileEntry) init(req *Requirement) (err error) {
	filename := e.Filename
	// Note: strings.TrimLeft/TrimRight treat their second argument as a set
	// of characters, not a prefix/suffix, and would eat into the plugin name
	// (ex: TrimLeft("packer-plugin-amazon_…", "packer-plugin-") yields
	// "mazon_…"). TrimPrefix/TrimSuffix are the correct calls here.
	res := strings.TrimPrefix(filename, req.FilenamePrefix())
	// res now looks like v0.2.12_x5.0_freebsd_amd64.zip

	e.ext = filepath.Ext(res)

	res = strings.TrimSuffix(res, e.ext)
	// res now looks like v0.2.12_x5.0_freebsd_amd64

	parts := strings.Split(res, "_")
	// ["v0.2.12", "x5.0", "freebsd", "amd64"]
	if len(parts) < 4 {
		return fmt.Errorf("malformed filename expected %s{version}_x{protocol-version}_{os}_{arch}", req.FilenamePrefix())
	}

	e.binVersion, e.protVersion, e.os, e.arch = parts[0], parts[1], parts[2], parts[3]

	return err
}
// validate tells whether this checksum entry describes a binary usable by
// this Packer: right plugin version, right system and a compatible protocol
// version.
func (e *ChecksumFileEntry) validate(expectedVersion string, installOpts BinaryInstallationOptions) error {
	switch {
	case e.binVersion != expectedVersion:
		return fmt.Errorf("wrong version, expected %s ", expectedVersion)
	case e.os != installOpts.OS, e.arch != installOpts.ARCH:
		return fmt.Errorf("wrong system, expected %s_%s ", installOpts.OS, installOpts.ARCH)
	default:
		return installOpts.CheckProtocolVersion(e.protVersion)
	}
}
// ParseChecksumFileEntries decodes a JSON list of checksum entries from f;
// the reader is left open.
func ParseChecksumFileEntries(f io.Reader) ([]ChecksumFileEntry, error) {
	var entries []ChecksumFileEntry
	err := json.NewDecoder(f).Decode(&entries)
	return entries, err
}
// InstallLatest fetches and installs the highest released version of plugin
// pr that matches pr.VersionConstraints as well as the protocol version and
// system from opts. It returns the resulting Installation, or (nil, nil)
// when a matching binary is already correctly installed locally.
func (pr *Requirement) InstallLatest(opts InstallOptions) (*Installation, error) {
	getters := opts.Getters
	fail := fmt.Errorf("could not find a local nor a remote checksum for plugin %q %q", pr.Identifier, pr.VersionConstraints)

	// Step 1: ask getters for the list of released versions and keep the
	// ones matching the version constraints. The first getter that yields
	// matching versions wins.
	log.Printf("[TRACE] getting available versions for the the %s plugin", pr.Identifier.ForDisplay())
	versions := version.Collection{}
	for _, getter := range getters {
		releasesFile, err := getter.Get("releases", GetOptions{
			PluginRequirement:         pr,
			BinaryInstallationOptions: opts.BinaryInstallationOptions,
		})
		if err != nil {
			err := fmt.Errorf("%q getter could not get release: %w", getter, err)
			log.Printf("[TRACE] %s", err.Error())
			continue
		}
		releases, err := ParseReleases(releasesFile)
		if err != nil {
			err := fmt.Errorf("could not parse release: %w", err)
			log.Printf("[TRACE] %s", err.Error())
			continue
		}
		if len(releases) == 0 {
			err := fmt.Errorf("no release found")
			log.Printf("[TRACE] %s", err.Error())
			continue
		}
		for _, release := range releases {
			v, err := version.NewVersion(release.Version)
			if err != nil {
				err := fmt.Errorf("Could not parse release version %s. %w", release.Version, err)
				log.Printf("[TRACE] %s, ignoring it", err.Error())
				continue
			}
			if pr.VersionConstraints.Check(v) {
				versions = append(versions, v)
			}
		}
		if len(versions) == 0 {
			err := fmt.Errorf("no matching version found in releases. In %v", releases)
			log.Printf("[TRACE] %s", err.Error())
			continue
		}
		break
	}

	// Here we want to try every release in order, starting from the highest one
	// that matches the requirements.
	// The system and protocol version need to match too.
	sort.Sort(sort.Reverse(versions))
	log.Printf("[DEBUG] will try to install: %s", versions)
	if len(versions) == 0 {
		err := fmt.Errorf("no release version found for the %s plugin matching the constraint(s): %q", pr.Identifier.ForDisplay(), pr.VersionConstraints.String())
		return nil, err
	}
	for _, version := range versions {
		//TODO(azr): split in its own InstallVersion(version, opts) function
		outputFolder := filepath.Join(
			// Pick last folder as it's the one with the highest priority
			opts.InFolders[len(opts.InFolders)-1],
			// add expected full path
			filepath.Join(pr.Identifier.Parts()...),
		)

		log.Printf("[TRACE] fetching checksums file for the %q version of the %s plugin in %q...", version, pr.Identifier.ForDisplay(), outputFolder)

		// Step 2: fetch + parse the checksum file for this version; before
		// downloading anything, bail out with (nil, nil) if a binary with a
		// valid local checksum is already present in any install folder.
		var checksum *FileChecksum
		for _, getter := range getters {
			if checksum != nil {
				break
			}
			for _, checksummer := range opts.Checksummers {
				if checksum != nil {
					break
				}
				checksumFile, err := getter.Get(checksummer.Type, GetOptions{
					PluginRequirement:         pr,
					BinaryInstallationOptions: opts.BinaryInstallationOptions,
					version:                   version,
				})
				if err != nil {
					err := fmt.Errorf("could not get %s checksum file for %s version %s. Is the file present on the release and correctly named ? %s", checksummer.Type, pr.Identifier.ForDisplay(), version, err)
					log.Printf("[TRACE] %s", err.Error())
					return nil, err
				}
				entries, err := ParseChecksumFileEntries(checksumFile)
				_ = checksumFile.Close()
				if err != nil {
					log.Printf("[TRACE] could not parse %s checksumfile: %v. Make sure the checksum file contains a checksum and a binary filename per line.", checksummer.Type, err)
					continue
				}
				for _, entry := range entries {
					if err := entry.init(pr); err != nil {
						log.Printf("[TRACE] could not parse checksum filename %s. Is it correctly formatted ? %s", entry.Filename, err)
						continue
					}
					if err := entry.validate("v"+version.String(), opts.BinaryInstallationOptions); err != nil {
						log.Printf("[TRACE] Ignoring remote binary %s, %s", entry.Filename, err)
						continue
					}

					log.Printf("[TRACE] About to get: %s", entry.Filename)

					cs, err := checksummer.ParseChecksum(strings.NewReader(entry.Checksum))
					if err != nil {
						log.Printf("[TRACE] could not parse %s checksum: %v. Make sure the checksum file contains the checksum and only the checksum.", checksummer.Type, err)
						continue
					}

					checksum = &FileChecksum{
						Filename:    entry.Filename,
						Expected:    cs,
						Checksummer: checksummer,
					}

					expectedZipFilename := checksum.Filename
					expectedBinaryFilename := strings.TrimSuffix(expectedZipFilename, filepath.Ext(expectedZipFilename)) + opts.BinaryInstallationOptions.Ext

					for _, outputFolder := range opts.InFolders {
						potentialOutputFilename := filepath.Join(
							outputFolder,
							filepath.Join(pr.Identifier.Parts()...),
							expectedBinaryFilename,
						)
						for _, potentialChecksumer := range opts.Checksummers {
							// First check if a local checksum file is already here in the expected
							// download folder. Here we want to download a binary so we only check
							// for an existing checksum file from the folder we want to download
							// into.
							cs, err := potentialChecksumer.GetCacheChecksumOfFile(potentialOutputFilename)
							if err == nil && len(cs) > 0 {
								localChecksum := &FileChecksum{
									Expected:    cs,
									Checksummer: potentialChecksumer,
								}

								log.Printf("[TRACE] found a pre-exising %q checksum file", potentialChecksumer.Type)

								// if outputFile is there and matches the checksum: do nothing more.
								if err := localChecksum.ChecksumFile(localChecksum.Expected, potentialOutputFilename); err == nil {
									log.Printf("[INFO] %s v%s plugin is already correctly installed in %q", pr.Identifier.ForDisplay(), version, potentialOutputFilename)
									return nil, nil
								}
							}
						}
					}

					// The last folder from the installation list is where we will install.
					outputFileName := filepath.Join(outputFolder, expectedBinaryFilename)

					// create directories if need be
					if err := os.MkdirAll(outputFolder, 0755); err != nil {
						err := fmt.Errorf("could not create plugin folder %q: %w", outputFolder, err)
						log.Printf("[TRACE] %s", err.Error())
						return nil, err
					}

					// Step 3: download the zip, verify it against the
					// checksum and extract the binary into outputFolder.
					for _, getter := range getters {
						// create temporary file that will receive a temporary binary.zip
						// NOTE(review): this defer-in-a-loop keeps every temp
						// file open until InstallLatest returns.
						tmpFile, err := tmp.File("packer-plugin-*.zip")
						if err != nil {
							return nil, fmt.Errorf("could not create temporary file to dowload plugin: %w", err)
						}
						defer tmpFile.Close()

						// start fetching binary
						remoteZipFile, err := getter.Get("zip", GetOptions{
							PluginRequirement:         pr,
							BinaryInstallationOptions: opts.BinaryInstallationOptions,
							version:                   version,
							expectedZipFilename:       expectedZipFilename,
						})
						if err != nil {
							err := fmt.Errorf("could not get binary for %s version %s. Is the file present on the release and correctly named ? %s", pr.Identifier.ForDisplay(), version, err)
							log.Printf("[TRACE] %v", err)
							continue
						}

						// write binary to tmp file
						_, err = io.Copy(tmpFile, remoteZipFile)
						_ = remoteZipFile.Close()
						if err != nil {
							err := fmt.Errorf("Error getting plugin: %w", err)
							log.Printf("[TRACE] %v, trying another getter", err)
							continue
						}

						if _, err := tmpFile.Seek(0, 0); err != nil {
							err := fmt.Errorf("Error seeking begining of temporary file for checksumming: %w", err)
							log.Printf("[TRACE] %v, continuing", err)
							continue
						}

						// verify that the checksum for the zip is what we expect.
						if err := checksum.Checksummer.Checksum(checksum.Expected, tmpFile); err != nil {
							err := fmt.Errorf("%w. Is the checksum file correct ? Is the binary file correct ?", err)
							log.Printf("%s, truncating the zipfile", err)
							if err := tmpFile.Truncate(0); err != nil {
								log.Printf("[TRACE] %v", err)
							}
							continue
						}

						tmpFileStat, err := tmpFile.Stat()
						if err != nil {
							err := fmt.Errorf("failed to stat: %v", err)
							return nil, err
						}
						zr, err := zip.NewReader(tmpFile, tmpFileStat.Size())
						if err != nil {
							err := fmt.Errorf("zip : %v", err)
							return nil, err
						}

						// locate the expected binary inside the zip
						var copyFrom io.ReadCloser
						for _, f := range zr.File {
							if f.Name != expectedBinaryFilename {
								continue
							}
							copyFrom, err = f.Open()
							if err != nil {
								return nil, err
							}
							break
						}
						if copyFrom == nil {
							err := fmt.Errorf("could not find a %s file in zipfile", checksum.Filename)
							return nil, err
						}

						outputFile, err := os.OpenFile(outputFileName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0755)
						if err != nil {
							err := fmt.Errorf("Failed to create %s: %v", outputFileName, err)
							return nil, err
						}
						defer outputFile.Close()

						if _, err := io.Copy(outputFile, copyFrom); err != nil {
							err := fmt.Errorf("Extract file: %v", err)
							return nil, err
						}

						if _, err := outputFile.Seek(0, 0); err != nil {
							err := fmt.Errorf("Error seeking begining of binary file for checksumming: %w", err)
							log.Printf("[WARNING] %v, ignoring", err)
						}

						// write a local checksum file next to the binary so
						// future runs can skip the download.
						cs, err := checksum.Checksummer.Sum(outputFile)
						if err != nil {
							err := fmt.Errorf("failed to checksum binary file: %s", err)
							log.Printf("[WARNING] %v, ignoring", err)
						}
						if err := ioutil.WriteFile(outputFileName+checksum.Checksummer.FileExt(), []byte(hex.EncodeToString(cs)), 0555); err != nil {
							err := fmt.Errorf("failed to write local binary checksum file: %s", err)
							log.Printf("[WARNING] %v, ignoring", err)
						}

						// Success !!
						return &Installation{
							BinaryPath: strings.ReplaceAll(outputFileName, "\\", "/"),
							Version:    "v" + version.String(),
						}, nil
					}
				}
			}
		}
	}

	return nil, fail
}

View File

@ -0,0 +1,641 @@
package plugingetter
import (
"archive/zip"
"bytes"
"crypto/sha256"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/go-version"
"github.com/hashicorp/packer/hcl2template/addrs"
)
// Folders containing pre-made plugin binaries and checksum files used as
// fixtures by the tests in this file.
var (
	pluginFolderOne            = filepath.Join("testdata", "plugins")
	pluginFolderTwo            = filepath.Join("testdata", "plugins_2")
	pluginFolderWrongChecksums = filepath.Join("testdata", "wrong_checksums")
)
// TestPlugin_ListInstallations checks that ListInstallations finds, filters
// (by protocol version, OS/ARCH and extension) and version-sorts the plugin
// binaries present in the testdata folders.
func TestPlugin_ListInstallations(t *testing.T) {
	type fields struct {
		Identifier         string
		VersionConstraints version.Constraints
	}
	tests := []struct {
		name    string
		fields  fields
		opts    ListInstallationsOptions
		wantErr bool
		want    InstallList
	}{
		{
			// only x5.0 binaries for darwin/amd64 should be listed.
			"darwin_amazon_prot_5.0",
			fields{
				Identifier: "amazon",
			},
			ListInstallationsOptions{
				[]string{
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "0",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			},
			false,
			[]*Installation{
				{
					Version:    "v1.2.3",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.3_x5.0_darwin_amd64"),
				},
				{
					Version:    "v1.2.4",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.4_x5.0_darwin_amd64"),
				},
				{
					Version:    "v1.2.5",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.5_x5.0_darwin_amd64"),
				},
			},
		},
		{
			// a Packer speaking x5.1 can also use x5.0 binaries.
			"darwin_amazon_prot_5.1",
			fields{
				Identifier: "amazon",
			},
			ListInstallationsOptions{
				[]string{
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			},
			false,
			[]*Installation{
				{
					Version:    "v1.2.3",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.3_x5.0_darwin_amd64"),
				},
				{
					Version:    "v1.2.4",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.4_x5.0_darwin_amd64"),
				},
				{
					Version:    "v1.2.5",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.5_x5.0_darwin_amd64"),
				},
				{
					Version:    "v1.2.6",
					BinaryPath: filepath.Join(pluginFolderTwo, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.6_x5.1_darwin_amd64"),
				},
			},
		},
		{
			// windows binaries must carry the .exe extension.
			"windows_amazon",
			fields{
				Identifier: "amazon",
			},
			ListInstallationsOptions{
				[]string{
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "0",
					OS: "windows", ARCH: "amd64",
					Ext: ".exe",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			},
			false,
			[]*Installation{
				{
					Version:    "v1.2.3",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.3_x5.0_windows_amd64.exe"),
				},
				{
					Version:    "v1.2.4",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.4_x5.0_windows_amd64.exe"),
				},
				{
					Version:    "v1.2.5",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "amazon", "packer-plugin-amazon_v1.2.5_x5.0_windows_amd64.exe"),
				},
			},
		},
		{
			// installs spread over two folders are merged and sorted.
			"windows_google_multifolder",
			fields{
				Identifier: "hashicorp/google",
			},
			ListInstallationsOptions{
				[]string{
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "0",
					OS: "windows", ARCH: "amd64",
					Ext: ".exe",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			},
			false,
			[]*Installation{
				{
					Version:    "v4.5.6",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "google", "packer-plugin-google_v4.5.6_x5.0_windows_amd64.exe"),
				},
				{
					Version:    "v4.5.7",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "google", "packer-plugin-google_v4.5.7_x5.0_windows_amd64.exe"),
				},
				{
					Version:    "v4.5.8",
					BinaryPath: filepath.Join(pluginFolderOne, "github.com", "hashicorp", "google", "packer-plugin-google_v4.5.8_x5.0_windows_amd64.exe"),
				},
				{
					Version:    "v4.5.9",
					BinaryPath: filepath.Join(pluginFolderTwo, "github.com", "hashicorp", "google", "packer-plugin-google_v4.5.9_x5.0_windows_amd64.exe"),
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			identifier, diags := addrs.ParsePluginSourceString(tt.fields.Identifier)
			if diags.HasErrors() {
				t.Fatalf("%v", diags)
			}
			p := Requirement{
				Identifier:         identifier,
				VersionConstraints: tt.fields.VersionConstraints,
			}
			got, err := p.ListInstallations(tt.opts)
			if (err != nil) != tt.wantErr {
				t.Errorf("Plugin.ListInstallations() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if diff := cmp.Diff(tt.want, got); diff != "" {
				t.Errorf("Plugin.ListInstallations() unexpected output: %s", diff)
			}
		})
	}
}
// TestRequirement_InstallLatest exercises InstallLatest end to end against a
// mockPluginGetter: skipping already-installed versions, ignoring binaries
// with an incompatible protocol version, upgrading, and erroring on bad
// remote or local checksums.
func TestRequirement_InstallLatest(t *testing.T) {
	type fields struct {
		Identifier         string
		VersionConstraints string
	}
	type args struct {
		opts InstallOptions
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    *Installation
		wantErr bool
	}{
		{"already-installed-same-api-version",
			fields{"amazon", "v1.2.3"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v1.2.3"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"1.2.3": {{
								// here the checksum file tells us what zipfiles
								// to expect. maybe we could cache the zip file
								// ? but then the plugin is present on the drive
								// twice.
								Filename: "packer-plugin-amazon_v1.2.3_x5.0_darwin_amd64.zip",
								Checksum: "1337c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
							}},
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "0",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			nil, false},
		{"already-installed-compatible-api-minor-version",
			// here 'packer' uses the protocol version 5.1 which is compatible
			// with the 5.0 one of an already installed plugin.
			fields{"amazon", "v1.2.3"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v1.2.3"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"1.2.3": {{
								Filename: "packer-plugin-amazon_v1.2.3_x5.0_darwin_amd64.zip",
								Checksum: "1337c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
							}},
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			nil, false},
		{"ignore-incompatible-higher-protocol-version",
			// here 'packer' needs a binary with protocol version 5.0, and a
			// working plugin is already installed; but a plugin with version
			// 6.0 is available locally and remotely. It simply needs to be
			// ignored.
			fields{"amazon", ">= v1"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v1.2.3"},
							{Version: "v1.2.4"},
							{Version: "v1.2.5"},
							{Version: "v2.0.0"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"2.0.0": {{
								Filename: "packer-plugin-amazon_v2.0.0_x6.0_darwin_amd64.zip",
								Checksum: "1337c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
							}},
							"1.2.5": {{
								Filename: "packer-plugin-amazon_v1.2.5_x5.0_darwin_amd64.zip",
								Checksum: "1337c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
							}},
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "5", APIVersionMinor: "0",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			nil, false},
		{"upgrade-with-diff-protocol-version",
			// here we have something locally and test that a newer version will
			// be installed, the newer version has a lower minor protocol
			// version than the one we support.
			fields{"amazon", ">= v2"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v1.2.3"},
							{Version: "v1.2.4"},
							{Version: "v1.2.5"},
							{Version: "v2.0.0"},
							{Version: "v2.1.0"},
							{Version: "v2.10.0"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"2.10.0": {{
								Filename: "packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip",
								Checksum: "43156b1900dc09b026b54610c4a152edd277366a7f71ff3812583e4a35dd0d4a",
							}},
						},
						Zips: map[string]io.ReadCloser{
							"github.com/hashicorp/packer-plugin-amazon/packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip": zipFile(map[string]string{
								"packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64": "v2.10.0_x6.0_darwin_amd64",
							}),
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "6", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			&Installation{
				BinaryPath: "testdata/plugins_2/github.com/hashicorp/amazon/packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64",
				Version:    "v2.10.0",
			}, false},
		{"upgrade-with-same-protocol-version",
			// here we have something locally and test that a newer version will
			// be installed.
			fields{"amazon", ">= v2"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v1.2.3"},
							{Version: "v1.2.4"},
							{Version: "v1.2.5"},
							{Version: "v2.0.0"},
							{Version: "v2.1.0"},
							{Version: "v2.10.0"},
							{Version: "v2.10.1"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"2.10.1": {{
								Filename: "packer-plugin-amazon_v2.10.1_x6.1_darwin_amd64.zip",
								Checksum: "90ca5b0f13a90238b62581bbf30bacd7e2c9af6592c7f4849627bddbcb039dec",
							}},
						},
						Zips: map[string]io.ReadCloser{
							"github.com/hashicorp/packer-plugin-amazon/packer-plugin-amazon_v2.10.1_x6.1_darwin_amd64.zip": zipFile(map[string]string{
								"packer-plugin-amazon_v2.10.1_x6.1_darwin_amd64": "v2.10.1_x6.1_darwin_amd64",
							}),
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "6", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			&Installation{
				BinaryPath: "testdata/plugins_2/github.com/hashicorp/amazon/packer-plugin-amazon_v2.10.1_x6.1_darwin_amd64",
				Version:    "v2.10.1",
			}, false},
		{"wrong-zip-checksum",
			// here we have something locally and test that a newer version with
			// a wrong checksum will not be installed and error.
			fields{"amazon", ">= v2"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v2.10.0"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"2.10.0": {{
								Filename: "packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip",
								Checksum: "133713371337133713371337c4a152edd277366a7f71ff3812583e4a35dd0d4a",
							}},
						},
						Zips: map[string]io.ReadCloser{
							"github.com/hashicorp/packer-plugin-amazon/packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip": zipFile(map[string]string{
								"packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64": "h4xx",
							}),
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
					pluginFolderOne,
					pluginFolderTwo,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "6", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			nil, true},
		{"wrong-local-checksum",
			// here we have something wrong locally and test that a newer
			// version with a wrong checksum will not be installed
			// this should totally error.
			fields{"amazon", ">= v1"},
			args{InstallOptions{
				[]Getter{
					&mockPluginGetter{
						Releases: []Release{
							{Version: "v2.10.0"},
						},
						ChecksumFileEntries: map[string][]ChecksumFileEntry{
							"2.10.0": {{
								Filename: "packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip",
								Checksum: "133713371337133713371337c4a152edd277366a7f71ff3812583e4a35dd0d4a",
							}},
						},
						Zips: map[string]io.ReadCloser{
							"github.com/hashicorp/packer-plugin-amazon/packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64.zip": zipFile(map[string]string{
								"packer-plugin-amazon_v2.10.0_x6.0_darwin_amd64": "h4xx",
							}),
						},
					},
				},
				[]string{
					pluginFolderWrongChecksums,
				},
				BinaryInstallationOptions{
					APIVersionMajor: "6", APIVersionMinor: "1",
					OS: "darwin", ARCH: "amd64",
					Checksummers: []Checksummer{
						{
							Type: "sha256",
							Hash: sha256.New(),
						},
					},
				},
			}},
			nil, true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			log.Printf("starting %s test", tt.name)
			identifier, diags := addrs.ParsePluginSourceString(tt.fields.Identifier)
			if len(diags) != 0 {
				t.Fatalf("ParsePluginSourceString(%q): %v", tt.fields.Identifier, diags)
			}
			cts, err := version.NewConstraint(tt.fields.VersionConstraints)
			if err != nil {
				t.Fatalf("version.NewConstraint(%q): %v", tt.fields.Identifier, err)
			}
			pr := &Requirement{
				Identifier:         identifier,
				VersionConstraints: cts,
			}
			got, err := pr.InstallLatest(tt.args.opts)
			if (err != nil) != tt.wantErr {
				t.Errorf("Requirement.InstallLatest() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if diff := cmp.Diff(got, tt.want); diff != "" {
				t.Errorf("Requirement.InstallLatest() %s", diff)
			}
			if tt.want != nil && tt.want.BinaryPath != "" {
				// Cleanup.
				// These two files should be here by now and os.Remove will fail if
				// they aren't.
				if err := os.Remove(filepath.Clean(tt.want.BinaryPath)); err != nil {
					t.Fatal(err)
				}
				if err := os.Remove(filepath.Clean(tt.want.BinaryPath + "_SHA256SUM")); err != nil {
					t.Fatal(err)
				}
			}
		})
	}
}
// mockPluginGetter is a test double for Getter that serves releases,
// checksum entries and zip files from memory.
type mockPluginGetter struct {
	Releases []Release
	// ChecksumFileEntries is keyed by bare release version, ex: "1.2.3".
	ChecksumFileEntries map[string][]ChecksumFileEntry
	// Zips is keyed by "<hostname>/<relative path>/<zip filename>".
	Zips map[string]io.ReadCloser
}
// Get serves the requested kind of file ("releases", "sha256" or "zip") from
// the mock's in-memory data, JSON-encoding releases and checksum entries on
// the fly. It panics on unknown requests so tests fail loudly.
func (g *mockPluginGetter) Get(what string, options GetOptions) (io.ReadCloser, error) {
	var toEncode interface{}
	switch what {
	case "releases":
		toEncode = g.Releases
	case "sha256":
		toEncode = g.ChecksumFileEntries[options.version.String()]
	case "zip":
		key := options.PluginRequirement.Identifier.Hostname + "/" +
			options.PluginRequirement.Identifier.RealRelativePath() + "/" +
			options.ExpectedZipFilename()
		archive, found := g.Zips[key]
		if !found {
			panic(fmt.Sprintf("could not find zipfile %s. %v", key, g.Zips))
		}
		return archive, nil
	default:
		panic("Don't know how to get " + what)
	}

	// stream the JSON encoding of toEncode through a pipe.
	reader, writer := io.Pipe()
	go func() {
		if err := json.NewEncoder(writer).Encode(toEncode); err != nil {
			panic(err)
		}
	}()
	return ioutil.NopCloser(reader), nil
}
func zipFile(content map[string]string) io.ReadCloser {
buff := bytes.NewBuffer(nil)
zipWriter := zip.NewWriter(buff)
for fileName, content := range content {
header := &zip.FileHeader{
Name: fileName,
UncompressedSize: uint32(len([]byte(content))),
}
fWriter, err := zipWriter.CreateHeader(header)
if err != nil {
panic(err)
}
_, err = io.Copy(fWriter, strings.NewReader(content))
if err != nil {
panic(err)
}
}
err := zipWriter.Close()
if err != nil {
panic(err)
}
return ioutil.NopCloser(buff)
}
var _ Getter = &mockPluginGetter{}

View File

@ -0,0 +1 @@
4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce

View File

@ -0,0 +1 @@
4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce

View File

@ -0,0 +1 @@
4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce

View File

@ -0,0 +1 @@
4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a

View File

@ -0,0 +1 @@
4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a

View File

@ -0,0 +1 @@
ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d

View File

@ -0,0 +1 @@
ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d

View File

@ -0,0 +1 @@
ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d

View File

@ -0,0 +1 @@
4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a

View File

@ -0,0 +1 @@
e7f6c011776e8db7cd330b54174fd76f7d0216b612387a5ffcfb81e6f0919683

View File

@ -0,0 +1 @@
4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a

View File

@ -0,0 +1 @@
7902699be42c8a8e46fbbb4501726517e86b22c56a189f7625a6da49081b2451

View File

@ -0,0 +1 @@
4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a

View File

@ -0,0 +1 @@
2c624232cdd221771294dfbb310aca000a0df6ac8b66b696d90ef06fdefb64a3

View File

@ -0,0 +1 @@
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855

View File

@ -0,0 +1 @@
ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d

View File

@ -0,0 +1 @@
e7f6c011776e8db7cd330b54174fd76f7d0216b612387a5ffcfb81e6f0919683

View File

@ -0,0 +1 @@
ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d

View File

@ -0,0 +1 @@
19581e27de7ced00ff1ce50b2047e7a567c76b1cbaebabe5ef03f7c3017bb5b7

Some files were not shown because too many files have changed in this diff Show More