Merge pull request #10619 from hashicorp/hcl2_upgrade_annotations
Add -with-annotation flag to hcl2_upgrade command
This commit is contained in:
commit
4f5af3b919
@ -145,6 +145,7 @@ type InspectArgs struct {
|
|||||||
|
|
||||||
func (va *HCL2UpgradeArgs) AddFlagSets(flags *flag.FlagSet) {
|
func (va *HCL2UpgradeArgs) AddFlagSets(flags *flag.FlagSet) {
|
||||||
flags.StringVar(&va.OutputFile, "output-file", "", "File where to put the hcl2 generated config. Defaults to JSON_TEMPLATE.pkr.hcl")
|
flags.StringVar(&va.OutputFile, "output-file", "", "File where to put the hcl2 generated config. Defaults to JSON_TEMPLATE.pkr.hcl")
|
||||||
|
flags.BoolVar(&va.WithAnnotations, "with-annotations", false, "Adds helper annotations with information about the generated HCL2 blocks.")
|
||||||
|
|
||||||
va.MetaArgs.AddFlagSets(flags)
|
va.MetaArgs.AddFlagSets(flags)
|
||||||
}
|
}
|
||||||
@ -152,7 +153,8 @@ func (va *HCL2UpgradeArgs) AddFlagSets(flags *flag.FlagSet) {
|
|||||||
// HCL2UpgradeArgs represents a parsed cli line for a `packer hcl2_upgrade`
|
// HCL2UpgradeArgs represents a parsed cli line for a `packer hcl2_upgrade`
|
||||||
type HCL2UpgradeArgs struct {
|
type HCL2UpgradeArgs struct {
|
||||||
MetaArgs
|
MetaArgs
|
||||||
OutputFile string
|
OutputFile string
|
||||||
|
WithAnnotations bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (va *FormatArgs) AddFlagSets(flags *flag.FlagSet) {
|
func (va *FormatArgs) AddFlagSets(flags *flag.FlagSet) {
|
||||||
|
@ -15,6 +15,7 @@ import (
|
|||||||
"github.com/hashicorp/hcl/v2/hclwrite"
|
"github.com/hashicorp/hcl/v2/hclwrite"
|
||||||
hcl2shim "github.com/hashicorp/packer-plugin-sdk/hcl2helper"
|
hcl2shim "github.com/hashicorp/packer-plugin-sdk/hcl2helper"
|
||||||
"github.com/hashicorp/packer-plugin-sdk/template"
|
"github.com/hashicorp/packer-plugin-sdk/template"
|
||||||
|
"github.com/hashicorp/packer/packer"
|
||||||
"github.com/mitchellh/mapstructure"
|
"github.com/mitchellh/mapstructure"
|
||||||
"github.com/posener/complete"
|
"github.com/posener/complete"
|
||||||
"github.com/zclconf/go-cty/cty"
|
"github.com/zclconf/go-cty/cty"
|
||||||
@ -94,7 +95,6 @@ const (
|
|||||||
# a build block invokes sources and runs provisioning steps on them. The
|
# a build block invokes sources and runs provisioning steps on them. The
|
||||||
# documentation for build blocks can be found here:
|
# documentation for build blocks can be found here:
|
||||||
# https://www.packer.io/docs/templates/hcl_templates/blocks/build
|
# https://www.packer.io/docs/templates/hcl_templates/blocks/build
|
||||||
build {
|
|
||||||
`
|
`
|
||||||
|
|
||||||
amazonAmiDataHeader = `
|
amazonAmiDataHeader = `
|
||||||
@ -120,6 +120,11 @@ var (
|
|||||||
timestamp = false
|
timestamp = false
|
||||||
)
|
)
|
||||||
|
|
||||||
|
type BlockParser interface {
|
||||||
|
Parse(*template.Template) error
|
||||||
|
Write(*bytes.Buffer)
|
||||||
|
}
|
||||||
|
|
||||||
func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs) int {
|
func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs) int {
|
||||||
var output io.Writer
|
var output io.Writer
|
||||||
if err := os.MkdirAll(filepath.Dir(cla.OutputFile), 0); err != nil {
|
if err := os.MkdirAll(filepath.Dir(cla.OutputFile), 0); err != nil {
|
||||||
@ -134,9 +139,11 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
|
|||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := output.Write([]byte(hcl2UpgradeFileHeader)); err != nil {
|
if cla.WithAnnotations {
|
||||||
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
|
if _, err := output.Write([]byte(hcl2UpgradeFileHeader)); err != nil {
|
||||||
return 1
|
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
|
||||||
|
return 1
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
hdl, ret := c.GetConfigFromJSON(&cla.MetaArgs)
|
hdl, ret := c.GetConfigFromJSON(&cla.MetaArgs)
|
||||||
@ -150,67 +157,31 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
|
|||||||
}
|
}
|
||||||
tpl := core.Template
|
tpl := core.Template
|
||||||
|
|
||||||
// OutPut Locals and Local blocks
|
// Parse blocks
|
||||||
localsContent := hclwrite.NewEmptyFile()
|
|
||||||
localsBody := localsContent.Body()
|
|
||||||
localsBody.AppendNewline()
|
|
||||||
localBody := localsBody.AppendNewBlock("locals", nil).Body()
|
|
||||||
|
|
||||||
localsOut := []byte{}
|
packerBlock := &PackerParser{
|
||||||
|
WithAnnotations: cla.WithAnnotations,
|
||||||
// Output variables section
|
}
|
||||||
variablesOut := []byte{}
|
if err := packerBlock.Parse(tpl); err != nil {
|
||||||
variables := []*template.Variable{}
|
c.Ui.Error(err.Error())
|
||||||
{
|
return 1
|
||||||
// sort variables to avoid map's randomness
|
|
||||||
for _, variable := range tpl.Variables {
|
|
||||||
variables = append(variables, variable)
|
|
||||||
}
|
|
||||||
sort.Slice(variables, func(i, j int) bool {
|
|
||||||
return variables[i].Key < variables[j].Key
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
hasLocals := false
|
variables := &VariableParser{
|
||||||
for _, variable := range variables {
|
WithAnnotations: cla.WithAnnotations,
|
||||||
variablesContent := hclwrite.NewEmptyFile()
|
}
|
||||||
variablesBody := variablesContent.Body()
|
if err := variables.Parse(tpl); err != nil {
|
||||||
variablesBody.AppendNewline()
|
c.Ui.Error(err.Error())
|
||||||
variableBody := variablesBody.AppendNewBlock("variable", []string{variable.Key}).Body()
|
return 1
|
||||||
variableBody.SetAttributeRaw("type", hclwrite.Tokens{&hclwrite.Token{Bytes: []byte("string")}})
|
|
||||||
|
|
||||||
if variable.Default != "" || !variable.Required {
|
|
||||||
variableBody.SetAttributeValue("default", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
|
||||||
}
|
|
||||||
sensitive := false
|
|
||||||
if isSensitiveVariable(variable.Key, tpl.SensitiveVariables) {
|
|
||||||
sensitive = true
|
|
||||||
variableBody.SetAttributeValue("sensitive", cty.BoolVal(true))
|
|
||||||
}
|
|
||||||
isLocal, out := variableTransposeTemplatingCalls(variablesContent.Bytes())
|
|
||||||
if isLocal {
|
|
||||||
if sensitive {
|
|
||||||
// Create Local block because this is sensitive
|
|
||||||
localContent := hclwrite.NewEmptyFile()
|
|
||||||
body := localContent.Body()
|
|
||||||
body.AppendNewline()
|
|
||||||
localBody := body.AppendNewBlock("local", []string{variable.Key}).Body()
|
|
||||||
localBody.SetAttributeValue("sensitive", cty.BoolVal(true))
|
|
||||||
localBody.SetAttributeValue("expression", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
|
||||||
localsOut = append(localsOut, transposeTemplatingCalls(localContent.Bytes())...)
|
|
||||||
localsVariableMap[variable.Key] = "local"
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
localBody.SetAttributeValue(variable.Key, hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
|
||||||
localsVariableMap[variable.Key] = "locals"
|
|
||||||
hasLocals = true
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
variablesOut = append(variablesOut, out...)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if hasLocals {
|
locals := &LocalsParser{
|
||||||
localsOut = append(localsOut, transposeTemplatingCalls(localsContent.Bytes())...)
|
LocalsOut: variables.localsOut,
|
||||||
|
WithAnnotations: cla.WithAnnotations,
|
||||||
|
}
|
||||||
|
if err := locals.Parse(tpl); err != nil {
|
||||||
|
c.Ui.Error(err.Error())
|
||||||
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
builders := []*template.Builder{}
|
builders := []*template.Builder{}
|
||||||
@ -221,9 +192,12 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Output amazon-ami data source section
|
amazonAmiDatasource := &AmazonAmiDatasourceParser{
|
||||||
amazonAmiOut, err := c.writeAmazonAmiDatasource(builders)
|
Builders: builders,
|
||||||
if err != nil {
|
WithAnnotations: cla.WithAnnotations,
|
||||||
|
}
|
||||||
|
if err := amazonAmiDatasource.Parse(tpl); err != nil {
|
||||||
|
c.Ui.Error(err.Error())
|
||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -231,238 +205,58 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
|
|||||||
return builders[i].Type+builders[i].Name < builders[j].Type+builders[j].Name
|
return builders[i].Type+builders[i].Name < builders[j].Type+builders[j].Name
|
||||||
})
|
})
|
||||||
|
|
||||||
// Output sources section
|
sources := &SourceParser{
|
||||||
sourcesOut := []byte{}
|
Builders: builders,
|
||||||
for i, builderCfg := range builders {
|
BuilderPlugins: c.Meta.CoreConfig.Components.PluginConfig.Builders,
|
||||||
sourcesContent := hclwrite.NewEmptyFile()
|
WithAnnotations: cla.WithAnnotations,
|
||||||
body := sourcesContent.Body()
|
}
|
||||||
|
if err := sources.Parse(tpl); err != nil {
|
||||||
body.AppendNewline()
|
c.Ui.Error(err.Error())
|
||||||
if !c.Meta.CoreConfig.Components.PluginConfig.Builders.Has(builderCfg.Type) {
|
return 1
|
||||||
c.Ui.Error(fmt.Sprintf("unknown builder type: %q\n", builderCfg.Type))
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
if builderCfg.Name == "" || builderCfg.Name == builderCfg.Type {
|
|
||||||
builderCfg.Name = fmt.Sprintf("autogenerated_%d", i+1)
|
|
||||||
}
|
|
||||||
builderCfg.Name = strings.ReplaceAll(strings.TrimSpace(builderCfg.Name), " ", "_")
|
|
||||||
|
|
||||||
sourceBody := body.AppendNewBlock("source", []string{builderCfg.Type, builderCfg.Name}).Body()
|
|
||||||
|
|
||||||
jsonBodyToHCL2Body(sourceBody, builderCfg.Config)
|
|
||||||
|
|
||||||
sourcesOut = append(sourcesOut, transposeTemplatingCalls(sourcesContent.Bytes())...)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Output build section
|
build := &BuildParser{
|
||||||
buildContent := hclwrite.NewEmptyFile()
|
Builders: builders,
|
||||||
buildBody := buildContent.Body()
|
WithAnnotations: cla.WithAnnotations,
|
||||||
if tpl.Description != "" {
|
}
|
||||||
buildBody.SetAttributeValue("description", cty.StringVal(tpl.Description))
|
if err := build.Parse(tpl); err != nil {
|
||||||
buildBody.AppendNewline()
|
c.Ui.Error(err.Error())
|
||||||
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
sourceNames := []string{}
|
amazonSecretsDatasource := &AmazonSecretsDatasourceParser{
|
||||||
for _, builder := range builders {
|
WithAnnotations: cla.WithAnnotations,
|
||||||
sourceNames = append(sourceNames, fmt.Sprintf("source.%s.%s", builder.Type, builder.Name))
|
|
||||||
}
|
}
|
||||||
buildBody.SetAttributeValue("sources", hcl2shim.HCL2ValueFromConfigValue(sourceNames))
|
if err := amazonSecretsDatasource.Parse(tpl); err != nil {
|
||||||
buildBody.AppendNewline()
|
c.Ui.Error(err.Error())
|
||||||
buildOut := buildContent.Bytes()
|
return 1
|
||||||
|
|
||||||
// Output provisioners section
|
|
||||||
provisionersOut := []byte{}
|
|
||||||
for _, provisioner := range tpl.Provisioners {
|
|
||||||
buildBody.AppendNewline()
|
|
||||||
contentBytes := c.writeProvisioner("provisioner", provisioner)
|
|
||||||
provisionersOut = append(provisionersOut, transposeTemplatingCalls(contentBytes)...)
|
|
||||||
}
|
|
||||||
|
|
||||||
if tpl.CleanupProvisioner != nil {
|
|
||||||
buildBody.AppendNewline()
|
|
||||||
contentBytes := c.writeProvisioner("error-cleanup-provisioner", tpl.CleanupProvisioner)
|
|
||||||
provisionersOut = append(provisionersOut, transposeTemplatingCalls(contentBytes)...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Output post-processors section
|
|
||||||
postProcessorsOut := []byte{}
|
|
||||||
for _, pps := range tpl.PostProcessors {
|
|
||||||
postProcessorContent := hclwrite.NewEmptyFile()
|
|
||||||
body := postProcessorContent.Body()
|
|
||||||
|
|
||||||
switch len(pps) {
|
|
||||||
case 0:
|
|
||||||
continue
|
|
||||||
case 1:
|
|
||||||
default:
|
|
||||||
body = body.AppendNewBlock("post-processors", nil).Body()
|
|
||||||
}
|
|
||||||
for _, pp := range pps {
|
|
||||||
ppBody := body.AppendNewBlock("post-processor", []string{pp.Type}).Body()
|
|
||||||
if pp.KeepInputArtifact != nil {
|
|
||||||
ppBody.SetAttributeValue("keep_input_artifact", cty.BoolVal(*pp.KeepInputArtifact))
|
|
||||||
}
|
|
||||||
cfg := pp.Config
|
|
||||||
if len(pp.Except) > 0 {
|
|
||||||
cfg["except"] = pp.Except
|
|
||||||
}
|
|
||||||
if len(pp.Only) > 0 {
|
|
||||||
cfg["only"] = pp.Only
|
|
||||||
}
|
|
||||||
if pp.Name != "" && pp.Name != pp.Type {
|
|
||||||
cfg["name"] = pp.Name
|
|
||||||
}
|
|
||||||
jsonBodyToHCL2Body(ppBody, cfg)
|
|
||||||
}
|
|
||||||
|
|
||||||
postProcessorsOut = append(postProcessorsOut, transposeTemplatingCalls(postProcessorContent.Bytes())...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Output amazon-secretsmanager data source section
|
|
||||||
keys := make([]string, 0, len(amazonSecretsManagerMap))
|
|
||||||
for k := range amazonSecretsManagerMap {
|
|
||||||
keys = append(keys, k)
|
|
||||||
}
|
|
||||||
sort.Strings(keys)
|
|
||||||
|
|
||||||
amazonSecretsDataOut := []byte{}
|
|
||||||
for _, dataSourceName := range keys {
|
|
||||||
datasourceContent := hclwrite.NewEmptyFile()
|
|
||||||
body := datasourceContent.Body()
|
|
||||||
body.AppendNewline()
|
|
||||||
datasourceBody := body.AppendNewBlock("data", []string{"amazon-secretsmanager", dataSourceName}).Body()
|
|
||||||
jsonBodyToHCL2Body(datasourceBody, amazonSecretsManagerMap[dataSourceName])
|
|
||||||
amazonSecretsDataOut = append(amazonSecretsDataOut, datasourceContent.Bytes()...)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write file
|
// Write file
|
||||||
out := &bytes.Buffer{}
|
out := &bytes.Buffer{}
|
||||||
|
|
||||||
// Packer section
|
blocks := map[int]BlockParser{
|
||||||
if tpl.MinVersion != "" {
|
1: packerBlock,
|
||||||
out.Write([]byte(packerBlockHeader))
|
2: variables,
|
||||||
fileContent := hclwrite.NewEmptyFile()
|
3: amazonSecretsDatasource,
|
||||||
body := fileContent.Body()
|
4: amazonAmiDatasource,
|
||||||
packerBody := body.AppendNewBlock("packer", nil).Body()
|
5: locals,
|
||||||
packerBody.SetAttributeValue("required_version", cty.StringVal(fmt.Sprintf(">= %s", tpl.MinVersion)))
|
6: sources,
|
||||||
out.Write(fileContent.Bytes())
|
7: build,
|
||||||
|
}
|
||||||
|
for i := 1; i <= len(blocks); i++ {
|
||||||
|
blocks[i].Write(out)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(variablesOut) > 0 {
|
if _, err := output.Write(hclwrite.Format(out.Bytes())); err != nil {
|
||||||
out.Write([]byte(inputVarHeader))
|
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
|
||||||
out.Write(variablesOut)
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(amazonSecretsManagerMap) > 0 {
|
|
||||||
out.Write([]byte(amazonSecretsManagerDataHeader))
|
|
||||||
out.Write(amazonSecretsDataOut)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(amazonAmiOut) > 0 {
|
|
||||||
out.Write([]byte(amazonAmiDataHeader))
|
|
||||||
out.Write(amazonAmiOut)
|
|
||||||
}
|
|
||||||
|
|
||||||
if timestamp {
|
|
||||||
_, _ = out.Write([]byte("\n"))
|
|
||||||
fmt.Fprintln(out, `# "timestamp" template function replacement`)
|
|
||||||
fmt.Fprintln(out, `locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(localsOut) > 0 {
|
|
||||||
out.Write([]byte(localsVarHeader))
|
|
||||||
out.Write(localsOut)
|
|
||||||
}
|
|
||||||
|
|
||||||
out.Write([]byte(sourcesHeader))
|
|
||||||
out.Write(sourcesOut)
|
|
||||||
|
|
||||||
out.Write([]byte(buildHeader))
|
|
||||||
out.Write(buildOut)
|
|
||||||
out.Write(provisionersOut)
|
|
||||||
out.Write(postProcessorsOut)
|
|
||||||
|
|
||||||
_, _ = out.Write([]byte("}\n"))
|
|
||||||
|
|
||||||
_, _ = output.Write(hclwrite.Format(out.Bytes()))
|
|
||||||
|
|
||||||
c.Ui.Say(fmt.Sprintf("Successfully created %s ", cla.OutputFile))
|
c.Ui.Say(fmt.Sprintf("Successfully created %s ", cla.OutputFile))
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *HCL2UpgradeCommand) writeProvisioner(typeName string, provisioner *template.Provisioner) []byte {
|
|
||||||
provisionerContent := hclwrite.NewEmptyFile()
|
|
||||||
body := provisionerContent.Body()
|
|
||||||
block := body.AppendNewBlock(typeName, []string{provisioner.Type})
|
|
||||||
cfg := provisioner.Config
|
|
||||||
if len(provisioner.Except) > 0 {
|
|
||||||
cfg["except"] = provisioner.Except
|
|
||||||
}
|
|
||||||
if len(provisioner.Only) > 0 {
|
|
||||||
cfg["only"] = provisioner.Only
|
|
||||||
}
|
|
||||||
if provisioner.MaxRetries != "" {
|
|
||||||
cfg["max_retries"] = provisioner.MaxRetries
|
|
||||||
}
|
|
||||||
if provisioner.Timeout > 0 {
|
|
||||||
cfg["timeout"] = provisioner.Timeout.String()
|
|
||||||
}
|
|
||||||
jsonBodyToHCL2Body(block.Body(), cfg)
|
|
||||||
return provisionerContent.Bytes()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *HCL2UpgradeCommand) writeAmazonAmiDatasource(builders []*template.Builder) ([]byte, error) {
|
|
||||||
amazonAmiOut := []byte{}
|
|
||||||
amazonAmiFilters := []map[string]interface{}{}
|
|
||||||
i := 1
|
|
||||||
for _, builder := range builders {
|
|
||||||
if strings.HasPrefix(builder.Type, "amazon-") {
|
|
||||||
if sourceAmiFilter, ok := builder.Config["source_ami_filter"]; ok {
|
|
||||||
sourceAmiFilterCfg := map[string]interface{}{}
|
|
||||||
if err := mapstructure.Decode(sourceAmiFilter, &sourceAmiFilterCfg); err != nil {
|
|
||||||
c.Ui.Error(fmt.Sprintf("Failed to write amazon-ami data source: %v", err))
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
duplicate := false
|
|
||||||
dataSourceName := fmt.Sprintf("autogenerated_%d", i)
|
|
||||||
for j, filter := range amazonAmiFilters {
|
|
||||||
if reflect.DeepEqual(filter, sourceAmiFilter) {
|
|
||||||
duplicate = true
|
|
||||||
dataSourceName = fmt.Sprintf("autogenerated_%d", j+1)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This is a hack...
|
|
||||||
// Use templating so that it could be correctly transformed later into a data resource
|
|
||||||
sourceAmiDataRef := fmt.Sprintf("{{ data `amazon-ami.%s.id` }}", dataSourceName)
|
|
||||||
|
|
||||||
if duplicate {
|
|
||||||
delete(builder.Config, "source_ami_filter")
|
|
||||||
builder.Config["source_ami"] = sourceAmiDataRef
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
amazonAmiFilters = append(amazonAmiFilters, sourceAmiFilterCfg)
|
|
||||||
delete(builder.Config, "source_ami_filter")
|
|
||||||
builder.Config["source_ami"] = sourceAmiDataRef
|
|
||||||
i++
|
|
||||||
|
|
||||||
datasourceContent := hclwrite.NewEmptyFile()
|
|
||||||
body := datasourceContent.Body()
|
|
||||||
body.AppendNewline()
|
|
||||||
sourceBody := body.AppendNewBlock("data", []string{"amazon-ami", dataSourceName}).Body()
|
|
||||||
jsonBodyToHCL2Body(sourceBody, sourceAmiFilterCfg)
|
|
||||||
amazonAmiOut = append(amazonAmiOut, transposeTemplatingCalls(datasourceContent.Bytes())...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return amazonAmiOut, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type UnhandleableArgumentError struct {
|
type UnhandleableArgumentError struct {
|
||||||
Call string
|
Call string
|
||||||
Correspondance string
|
Correspondance string
|
||||||
@ -789,3 +583,441 @@ func (*HCL2UpgradeCommand) AutocompleteArgs() complete.Predictor {
|
|||||||
func (*HCL2UpgradeCommand) AutocompleteFlags() complete.Flags {
|
func (*HCL2UpgradeCommand) AutocompleteFlags() complete.Flags {
|
||||||
return complete.Flags{}
|
return complete.Flags{}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Specific blocks parser responsible to parse and write the block
|
||||||
|
|
||||||
|
type PackerParser struct {
|
||||||
|
WithAnnotations bool
|
||||||
|
out []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *PackerParser) Parse(tpl *template.Template) error {
|
||||||
|
if tpl.MinVersion != "" {
|
||||||
|
fileContent := hclwrite.NewEmptyFile()
|
||||||
|
body := fileContent.Body()
|
||||||
|
packerBody := body.AppendNewBlock("packer", nil).Body()
|
||||||
|
packerBody.SetAttributeValue("required_version", cty.StringVal(fmt.Sprintf(">= %s", tpl.MinVersion)))
|
||||||
|
p.out = fileContent.Bytes()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *PackerParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(packerBlockHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type VariableParser struct {
|
||||||
|
WithAnnotations bool
|
||||||
|
variablesOut []byte
|
||||||
|
localsOut []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *VariableParser) Parse(tpl *template.Template) error {
|
||||||
|
// OutPut Locals and Local blocks
|
||||||
|
localsContent := hclwrite.NewEmptyFile()
|
||||||
|
localsBody := localsContent.Body()
|
||||||
|
localsBody.AppendNewline()
|
||||||
|
localBody := localsBody.AppendNewBlock("locals", nil).Body()
|
||||||
|
|
||||||
|
if len(p.variablesOut) == 0 {
|
||||||
|
p.variablesOut = []byte{}
|
||||||
|
}
|
||||||
|
if len(p.localsOut) == 0 {
|
||||||
|
p.localsOut = []byte{}
|
||||||
|
}
|
||||||
|
|
||||||
|
variables := []*template.Variable{}
|
||||||
|
{
|
||||||
|
// sort variables to avoid map's randomness
|
||||||
|
for _, variable := range tpl.Variables {
|
||||||
|
variables = append(variables, variable)
|
||||||
|
}
|
||||||
|
sort.Slice(variables, func(i, j int) bool {
|
||||||
|
return variables[i].Key < variables[j].Key
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
hasLocals := false
|
||||||
|
for _, variable := range variables {
|
||||||
|
variablesContent := hclwrite.NewEmptyFile()
|
||||||
|
variablesBody := variablesContent.Body()
|
||||||
|
variablesBody.AppendNewline()
|
||||||
|
variableBody := variablesBody.AppendNewBlock("variable", []string{variable.Key}).Body()
|
||||||
|
variableBody.SetAttributeRaw("type", hclwrite.Tokens{&hclwrite.Token{Bytes: []byte("string")}})
|
||||||
|
|
||||||
|
if variable.Default != "" || !variable.Required {
|
||||||
|
variableBody.SetAttributeValue("default", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
||||||
|
}
|
||||||
|
sensitive := false
|
||||||
|
if isSensitiveVariable(variable.Key, tpl.SensitiveVariables) {
|
||||||
|
sensitive = true
|
||||||
|
variableBody.SetAttributeValue("sensitive", cty.BoolVal(true))
|
||||||
|
}
|
||||||
|
isLocal, out := variableTransposeTemplatingCalls(variablesContent.Bytes())
|
||||||
|
if isLocal {
|
||||||
|
if sensitive {
|
||||||
|
// Create Local block because this is sensitive
|
||||||
|
localContent := hclwrite.NewEmptyFile()
|
||||||
|
body := localContent.Body()
|
||||||
|
body.AppendNewline()
|
||||||
|
localBody := body.AppendNewBlock("local", []string{variable.Key}).Body()
|
||||||
|
localBody.SetAttributeValue("sensitive", cty.BoolVal(true))
|
||||||
|
localBody.SetAttributeValue("expression", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
||||||
|
p.localsOut = append(p.localsOut, transposeTemplatingCalls(localContent.Bytes())...)
|
||||||
|
localsVariableMap[variable.Key] = "local"
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
localBody.SetAttributeValue(variable.Key, hcl2shim.HCL2ValueFromConfigValue(variable.Default))
|
||||||
|
localsVariableMap[variable.Key] = "locals"
|
||||||
|
hasLocals = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
p.variablesOut = append(p.variablesOut, out...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasLocals {
|
||||||
|
p.localsOut = append(p.localsOut, transposeTemplatingCalls(localsContent.Bytes())...)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *VariableParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.variablesOut) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(inputVarHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.variablesOut)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type LocalsParser struct {
|
||||||
|
WithAnnotations bool
|
||||||
|
LocalsOut []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *LocalsParser) Parse(tpl *template.Template) error {
|
||||||
|
// Locals where parsed with Variables
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *LocalsParser) Write(out *bytes.Buffer) {
|
||||||
|
if timestamp {
|
||||||
|
_, _ = out.Write([]byte("\n"))
|
||||||
|
if p.WithAnnotations {
|
||||||
|
fmt.Fprintln(out, `# "timestamp" template function replacement`)
|
||||||
|
}
|
||||||
|
fmt.Fprintln(out, `locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }`)
|
||||||
|
}
|
||||||
|
if len(p.LocalsOut) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(localsVarHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.LocalsOut)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type AmazonSecretsDatasourceParser struct {
|
||||||
|
WithAnnotations bool
|
||||||
|
out []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *AmazonSecretsDatasourceParser) Parse(_ *template.Template) error {
|
||||||
|
if p.out == nil {
|
||||||
|
p.out = []byte{}
|
||||||
|
}
|
||||||
|
|
||||||
|
keys := make([]string, 0, len(amazonSecretsManagerMap))
|
||||||
|
for k := range amazonSecretsManagerMap {
|
||||||
|
keys = append(keys, k)
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
|
||||||
|
for _, dataSourceName := range keys {
|
||||||
|
datasourceContent := hclwrite.NewEmptyFile()
|
||||||
|
body := datasourceContent.Body()
|
||||||
|
body.AppendNewline()
|
||||||
|
datasourceBody := body.AppendNewBlock("data", []string{"amazon-secretsmanager", dataSourceName}).Body()
|
||||||
|
jsonBodyToHCL2Body(datasourceBody, amazonSecretsManagerMap[dataSourceName])
|
||||||
|
p.out = append(p.out, datasourceContent.Bytes()...)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *AmazonSecretsDatasourceParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(amazonSecretsManagerDataHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type AmazonAmiDatasourceParser struct {
|
||||||
|
Builders []*template.Builder
|
||||||
|
WithAnnotations bool
|
||||||
|
out []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *AmazonAmiDatasourceParser) Parse(_ *template.Template) error {
|
||||||
|
if p.out == nil {
|
||||||
|
p.out = []byte{}
|
||||||
|
}
|
||||||
|
|
||||||
|
amazonAmiFilters := []map[string]interface{}{}
|
||||||
|
i := 1
|
||||||
|
for _, builder := range p.Builders {
|
||||||
|
if strings.HasPrefix(builder.Type, "amazon-") {
|
||||||
|
if sourceAmiFilter, ok := builder.Config["source_ami_filter"]; ok {
|
||||||
|
sourceAmiFilterCfg := map[string]interface{}{}
|
||||||
|
if err := mapstructure.Decode(sourceAmiFilter, &sourceAmiFilterCfg); err != nil {
|
||||||
|
return fmt.Errorf("Failed to write amazon-ami data source: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
duplicate := false
|
||||||
|
dataSourceName := fmt.Sprintf("autogenerated_%d", i)
|
||||||
|
for j, filter := range amazonAmiFilters {
|
||||||
|
if reflect.DeepEqual(filter, sourceAmiFilter) {
|
||||||
|
duplicate = true
|
||||||
|
dataSourceName = fmt.Sprintf("autogenerated_%d", j+1)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is a hack...
|
||||||
|
// Use templating so that it could be correctly transformed later into a data resource
|
||||||
|
sourceAmiDataRef := fmt.Sprintf("{{ data `amazon-ami.%s.id` }}", dataSourceName)
|
||||||
|
|
||||||
|
if duplicate {
|
||||||
|
delete(builder.Config, "source_ami_filter")
|
||||||
|
builder.Config["source_ami"] = sourceAmiDataRef
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
amazonAmiFilters = append(amazonAmiFilters, sourceAmiFilterCfg)
|
||||||
|
delete(builder.Config, "source_ami_filter")
|
||||||
|
builder.Config["source_ami"] = sourceAmiDataRef
|
||||||
|
i++
|
||||||
|
|
||||||
|
datasourceContent := hclwrite.NewEmptyFile()
|
||||||
|
body := datasourceContent.Body()
|
||||||
|
body.AppendNewline()
|
||||||
|
sourceBody := body.AppendNewBlock("data", []string{"amazon-ami", dataSourceName}).Body()
|
||||||
|
jsonBodyToHCL2Body(sourceBody, sourceAmiFilterCfg)
|
||||||
|
p.out = append(p.out, transposeTemplatingCalls(datasourceContent.Bytes())...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *AmazonAmiDatasourceParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(amazonAmiDataHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type SourceParser struct {
|
||||||
|
Builders []*template.Builder
|
||||||
|
BuilderPlugins packer.BuilderSet
|
||||||
|
WithAnnotations bool
|
||||||
|
out []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *SourceParser) Parse(tpl *template.Template) error {
|
||||||
|
if p.out == nil {
|
||||||
|
p.out = []byte{}
|
||||||
|
}
|
||||||
|
for i, builderCfg := range p.Builders {
|
||||||
|
sourcesContent := hclwrite.NewEmptyFile()
|
||||||
|
body := sourcesContent.Body()
|
||||||
|
|
||||||
|
body.AppendNewline()
|
||||||
|
if !p.BuilderPlugins.Has(builderCfg.Type) {
|
||||||
|
return fmt.Errorf("unknown builder type: %q\n", builderCfg.Type)
|
||||||
|
}
|
||||||
|
if builderCfg.Name == "" || builderCfg.Name == builderCfg.Type {
|
||||||
|
builderCfg.Name = fmt.Sprintf("autogenerated_%d", i+1)
|
||||||
|
}
|
||||||
|
builderCfg.Name = strings.ReplaceAll(strings.TrimSpace(builderCfg.Name), " ", "_")
|
||||||
|
|
||||||
|
sourceBody := body.AppendNewBlock("source", []string{builderCfg.Type, builderCfg.Name}).Body()
|
||||||
|
|
||||||
|
jsonBodyToHCL2Body(sourceBody, builderCfg.Config)
|
||||||
|
|
||||||
|
p.out = append(p.out, transposeTemplatingCalls(sourcesContent.Bytes())...)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *SourceParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(sourcesHeader))
|
||||||
|
}
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type BuildParser struct {
|
||||||
|
Builders []*template.Builder
|
||||||
|
WithAnnotations bool
|
||||||
|
|
||||||
|
provisioners BlockParser
|
||||||
|
postProcessors BlockParser
|
||||||
|
out []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *BuildParser) Parse(tpl *template.Template) error {
|
||||||
|
buildContent := hclwrite.NewEmptyFile()
|
||||||
|
buildBody := buildContent.Body()
|
||||||
|
if tpl.Description != "" {
|
||||||
|
buildBody.SetAttributeValue("description", cty.StringVal(tpl.Description))
|
||||||
|
buildBody.AppendNewline()
|
||||||
|
}
|
||||||
|
|
||||||
|
sourceNames := []string{}
|
||||||
|
for _, builder := range p.Builders {
|
||||||
|
sourceNames = append(sourceNames, fmt.Sprintf("source.%s.%s", builder.Type, builder.Name))
|
||||||
|
}
|
||||||
|
buildBody.SetAttributeValue("sources", hcl2shim.HCL2ValueFromConfigValue(sourceNames))
|
||||||
|
buildBody.AppendNewline()
|
||||||
|
p.out = buildContent.Bytes()
|
||||||
|
|
||||||
|
p.provisioners = &ProvisionerParser{
|
||||||
|
WithAnnotations: p.WithAnnotations,
|
||||||
|
}
|
||||||
|
if err := p.provisioners.Parse(tpl); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
p.postProcessors = &PostProcessorParser{
|
||||||
|
WithAnnotations: p.WithAnnotations,
|
||||||
|
}
|
||||||
|
if err := p.postProcessors.Parse(tpl); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *BuildParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
if p.WithAnnotations {
|
||||||
|
out.Write([]byte(buildHeader))
|
||||||
|
} else {
|
||||||
|
_, _ = out.Write([]byte("\n"))
|
||||||
|
}
|
||||||
|
_, _ = out.Write([]byte("build {\n"))
|
||||||
|
out.Write(p.out)
|
||||||
|
p.provisioners.Write(out)
|
||||||
|
p.postProcessors.Write(out)
|
||||||
|
_, _ = out.Write([]byte("}\n"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProvisionerParser renders the provisioners of a JSON template as HCL2
// `provisioner` (and `error-cleanup-provisioner`) blocks.
type ProvisionerParser struct {
	// WithAnnotations controls whether helper comments are written
	// alongside the generated HCL2 blocks.
	WithAnnotations bool
	// out accumulates the rendered blocks; filled by Parse, flushed by Write.
	out []byte
}
|
||||||
|
func (p *ProvisionerParser) Parse(tpl *template.Template) error {
|
||||||
|
if p.out == nil {
|
||||||
|
p.out = []byte{}
|
||||||
|
}
|
||||||
|
for _, provisioner := range tpl.Provisioners {
|
||||||
|
contentBytes := writeProvisioner("provisioner", provisioner)
|
||||||
|
p.out = append(p.out, transposeTemplatingCalls(contentBytes)...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if tpl.CleanupProvisioner != nil {
|
||||||
|
contentBytes := writeProvisioner("error-cleanup-provisioner", tpl.CleanupProvisioner)
|
||||||
|
p.out = append(p.out, transposeTemplatingCalls(contentBytes)...)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeProvisioner(typeName string, provisioner *template.Provisioner) []byte {
|
||||||
|
provisionerContent := hclwrite.NewEmptyFile()
|
||||||
|
body := provisionerContent.Body()
|
||||||
|
block := body.AppendNewBlock(typeName, []string{provisioner.Type})
|
||||||
|
cfg := provisioner.Config
|
||||||
|
if len(provisioner.Except) > 0 {
|
||||||
|
cfg["except"] = provisioner.Except
|
||||||
|
}
|
||||||
|
if len(provisioner.Only) > 0 {
|
||||||
|
cfg["only"] = provisioner.Only
|
||||||
|
}
|
||||||
|
if provisioner.MaxRetries != "" {
|
||||||
|
cfg["max_retries"] = provisioner.MaxRetries
|
||||||
|
}
|
||||||
|
if provisioner.Timeout > 0 {
|
||||||
|
cfg["timeout"] = provisioner.Timeout.String()
|
||||||
|
}
|
||||||
|
body.AppendNewline()
|
||||||
|
jsonBodyToHCL2Body(block.Body(), cfg)
|
||||||
|
return provisionerContent.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *ProvisionerParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PostProcessorParser renders the post-processor sequences of a JSON
// template as HCL2 `post-processor` / `post-processors` blocks.
type PostProcessorParser struct {
	// WithAnnotations controls whether helper comments are written
	// alongside the generated HCL2 blocks.
	WithAnnotations bool
	// out accumulates the rendered blocks; filled by Parse, flushed by Write.
	out []byte
}
|
|
||||||
|
func (p *PostProcessorParser) Parse(tpl *template.Template) error {
|
||||||
|
if p.out == nil {
|
||||||
|
p.out = []byte{}
|
||||||
|
}
|
||||||
|
for _, pps := range tpl.PostProcessors {
|
||||||
|
postProcessorContent := hclwrite.NewEmptyFile()
|
||||||
|
body := postProcessorContent.Body()
|
||||||
|
|
||||||
|
switch len(pps) {
|
||||||
|
case 0:
|
||||||
|
continue
|
||||||
|
case 1:
|
||||||
|
default:
|
||||||
|
body = body.AppendNewBlock("post-processors", nil).Body()
|
||||||
|
}
|
||||||
|
for _, pp := range pps {
|
||||||
|
ppBody := body.AppendNewBlock("post-processor", []string{pp.Type}).Body()
|
||||||
|
if pp.KeepInputArtifact != nil {
|
||||||
|
ppBody.SetAttributeValue("keep_input_artifact", cty.BoolVal(*pp.KeepInputArtifact))
|
||||||
|
}
|
||||||
|
cfg := pp.Config
|
||||||
|
if len(pp.Except) > 0 {
|
||||||
|
cfg["except"] = pp.Except
|
||||||
|
}
|
||||||
|
if len(pp.Only) > 0 {
|
||||||
|
cfg["only"] = pp.Only
|
||||||
|
}
|
||||||
|
if pp.Name != "" && pp.Name != pp.Type {
|
||||||
|
cfg["name"] = pp.Name
|
||||||
|
}
|
||||||
|
jsonBodyToHCL2Body(ppBody, cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
p.out = append(p.out, transposeTemplatingCalls(postProcessorContent.Bytes())...)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *PostProcessorParser) Write(out *bytes.Buffer) {
|
||||||
|
if len(p.out) > 0 {
|
||||||
|
out.Write(p.out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -18,11 +18,13 @@ func Test_hcl2_upgrade(t *testing.T) {
|
|||||||
|
|
||||||
tc := []struct {
|
tc := []struct {
|
||||||
folder string
|
folder string
|
||||||
|
flags []string
|
||||||
}{
|
}{
|
||||||
{"complete"},
|
{folder: "complete", flags: []string{"-with-annotations"}},
|
||||||
{"minimal"},
|
{folder: "without-annotations", flags: []string{}},
|
||||||
{"source-name"},
|
{folder: "minimal", flags: []string{"-with-annotations"}},
|
||||||
{"error-cleanup-provisioner"},
|
{folder: "source-name", flags: []string{"-with-annotations"}},
|
||||||
|
{folder: "error-cleanup-provisioner", flags: []string{"-with-annotations"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tc := range tc {
|
for _, tc := range tc {
|
||||||
@ -30,7 +32,12 @@ func Test_hcl2_upgrade(t *testing.T) {
|
|||||||
inputPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "input.json"))
|
inputPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "input.json"))
|
||||||
outputPath := inputPath + ".pkr.hcl"
|
outputPath := inputPath + ".pkr.hcl"
|
||||||
expectedPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "expected.pkr.hcl"))
|
expectedPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "expected.pkr.hcl"))
|
||||||
p := helperCommand(t, "hcl2_upgrade", inputPath)
|
args := []string{"hcl2_upgrade"}
|
||||||
|
if len(tc.flags) > 0 {
|
||||||
|
args = append(args, tc.flags...)
|
||||||
|
}
|
||||||
|
args = append(args, inputPath)
|
||||||
|
p := helperCommand(t, args...)
|
||||||
bs, err := p.CombinedOutput()
|
bs, err := p.CombinedOutput()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("%v %s", err, bs)
|
t.Fatalf("%v %s", err, bs)
|
||||||
|
@ -174,13 +174,16 @@ build {
|
|||||||
inline = ["echo ${var.secret_account}", "echo ${build.ID}", "echo ${build.SSHPublicKey} | head -c 14", "echo ${path.root} is not ${path.cwd}", "echo ${packer.version}", "echo ${uuidv4()}"]
|
inline = ["echo ${var.secret_account}", "echo ${build.ID}", "echo ${build.SSHPublicKey} | head -c 14", "echo ${path.root} is not ${path.cwd}", "echo ${packer.version}", "echo ${uuidv4()}"]
|
||||||
max_retries = "5"
|
max_retries = "5"
|
||||||
}
|
}
|
||||||
|
|
||||||
provisioner "shell" {
|
provisioner "shell" {
|
||||||
inline = ["echo ${local.password}", "echo ${data.amazon-secretsmanager.autogenerated_1.value}", "echo ${local.password_key}", "echo ${data.amazon-secretsmanager.autogenerated_2.value}"]
|
inline = ["echo ${local.password}", "echo ${data.amazon-secretsmanager.autogenerated_1.value}", "echo ${local.password_key}", "echo ${data.amazon-secretsmanager.autogenerated_2.value}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
provisioner "shell" {
|
provisioner "shell" {
|
||||||
inline = ["echo ${data.amazon-secretsmanager.autogenerated_3.value}", "echo ${data.amazon-secretsmanager.autogenerated_4.value}"]
|
inline = ["echo ${data.amazon-secretsmanager.autogenerated_3.value}", "echo ${data.amazon-secretsmanager.autogenerated_4.value}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:38: executing "hcl2_upgrade" at <clean_resource_name>: error calling clean_resource_name: unhandled "clean_resource_name" call:
|
# template: hcl2_upgrade:2:38: executing "hcl2_upgrade" at <clean_resource_name>: error calling clean_resource_name: unhandled "clean_resource_name" call:
|
||||||
# there is no way to automatically upgrade the "clean_resource_name" call.
|
# there is no way to automatically upgrade the "clean_resource_name" call.
|
||||||
# Please manually upgrade to use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
# Please manually upgrade to use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
@ -189,6 +192,7 @@ build {
|
|||||||
inline = ["echo mybuild-{{isotime | clean_resource_name}}"]
|
inline = ["echo mybuild-{{isotime | clean_resource_name}}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <lower>: error calling lower: unhandled "lower" call:
|
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <lower>: error calling lower: unhandled "lower" call:
|
||||||
# there is no way to automatically upgrade the "lower" call.
|
# there is no way to automatically upgrade the "lower" call.
|
||||||
# Please manually upgrade to `lower(var.example)`
|
# Please manually upgrade to `lower(var.example)`
|
||||||
@ -197,6 +201,7 @@ build {
|
|||||||
inline = ["echo {{ `SOMETHING` | lower }}"]
|
inline = ["echo {{ `SOMETHING` | lower }}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <upper>: error calling upper: unhandled "upper" call:
|
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <upper>: error calling upper: unhandled "upper" call:
|
||||||
# there is no way to automatically upgrade the "upper" call.
|
# there is no way to automatically upgrade the "upper" call.
|
||||||
# Please manually upgrade to `upper(var.example)`
|
# Please manually upgrade to `upper(var.example)`
|
||||||
@ -205,6 +210,7 @@ build {
|
|||||||
inline = ["echo {{ `something` | upper }}"]
|
inline = ["echo {{ `something` | upper }}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <split `some-string` `-` 0>: error calling split: unhandled "split" call:
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <split `some-string` `-` 0>: error calling split: unhandled "split" call:
|
||||||
# there is no way to automatically upgrade the "split" call.
|
# there is no way to automatically upgrade the "split" call.
|
||||||
# Please manually upgrade to `split(separator, string)`
|
# Please manually upgrade to `split(separator, string)`
|
||||||
@ -213,6 +219,7 @@ build {
|
|||||||
inline = ["echo {{ split `some-string` `-` 0 }}"]
|
inline = ["echo {{ split `some-string` `-` 0 }}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace_all `-` `/` build_name>: error calling replace_all: unhandled "replace_all" call:
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace_all `-` `/` build_name>: error calling replace_all: unhandled "replace_all" call:
|
||||||
# there is no way to automatically upgrade the "replace_all" call.
|
# there is no way to automatically upgrade the "replace_all" call.
|
||||||
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
@ -221,6 +228,7 @@ build {
|
|||||||
inline = ["echo {{ replace_all `-` `/` build_name }}"]
|
inline = ["echo {{ replace_all `-` `/` build_name }}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace `some-string` `-` `/` 1>: error calling replace: unhandled "replace" call:
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace `some-string` `-` `/` 1>: error calling replace: unhandled "replace" call:
|
||||||
# there is no way to automatically upgrade the "replace" call.
|
# there is no way to automatically upgrade the "replace" call.
|
||||||
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
@ -228,11 +236,13 @@ build {
|
|||||||
provisioner "shell" {
|
provisioner "shell" {
|
||||||
inline = ["echo {{ replace `some-string` `-` `/` 1 }}"]
|
inline = ["echo {{ replace `some-string` `-` `/` 1 }}"]
|
||||||
}
|
}
|
||||||
|
|
||||||
provisioner "shell-local" {
|
provisioner "shell-local" {
|
||||||
inline = ["sleep 100000"]
|
inline = ["sleep 100000"]
|
||||||
only = ["amazon-ebs"]
|
only = ["amazon-ebs"]
|
||||||
timeout = "5s"
|
timeout = "5s"
|
||||||
}
|
}
|
||||||
|
|
||||||
post-processor "amazon-import" {
|
post-processor "amazon-import" {
|
||||||
format = "vmdk"
|
format = "vmdk"
|
||||||
license_type = "BYOL"
|
license_type = "BYOL"
|
||||||
|
@ -28,7 +28,9 @@ build {
|
|||||||
provisioner "shell-local" {
|
provisioner "shell-local" {
|
||||||
inline = ["exit 2"]
|
inline = ["exit 2"]
|
||||||
}
|
}
|
||||||
|
|
||||||
error-cleanup-provisioner "shell-local" {
|
error-cleanup-provisioner "shell-local" {
|
||||||
inline = ["echo 'rubber ducky'> ducky.txt"]
|
inline = ["echo 'rubber ducky'> ducky.txt"]
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,229 @@
|
|||||||
|
packer {
|
||||||
|
required_version = ">= 1.6.0"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "aws_access_key" {
|
||||||
|
type = string
|
||||||
|
default = ""
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "aws_region" {
|
||||||
|
type = string
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "aws_secondary_region" {
|
||||||
|
type = string
|
||||||
|
default = "${env("AWS_DEFAULT_REGION")}"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "aws_secret_key" {
|
||||||
|
type = string
|
||||||
|
default = ""
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "secret_account" {
|
||||||
|
type = string
|
||||||
|
default = "🤷"
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
data "amazon-secretsmanager" "autogenerated_1" {
|
||||||
|
name = "sample/app/password"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "amazon-secretsmanager" "autogenerated_2" {
|
||||||
|
key = "api_key"
|
||||||
|
name = "sample/app/passwords"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "amazon-secretsmanager" "autogenerated_3" {
|
||||||
|
name = "some_secret"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "amazon-secretsmanager" "autogenerated_4" {
|
||||||
|
key = "with_key"
|
||||||
|
name = "some_secret"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "amazon-ami" "autogenerated_1" {
|
||||||
|
filters = {
|
||||||
|
name = "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*"
|
||||||
|
root-device-type = "ebs"
|
||||||
|
virtualization-type = "hvm"
|
||||||
|
}
|
||||||
|
most_recent = true
|
||||||
|
owners = ["099720109477"]
|
||||||
|
}
|
||||||
|
|
||||||
|
locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }
|
||||||
|
|
||||||
|
local "password" {
|
||||||
|
sensitive = true
|
||||||
|
expression = "${data.amazon-secretsmanager.autogenerated_1.value}"
|
||||||
|
}
|
||||||
|
|
||||||
|
locals {
|
||||||
|
password_key = "MY_KEY_${data.amazon-secretsmanager.autogenerated_2.value}"
|
||||||
|
}
|
||||||
|
|
||||||
|
source "amazon-ebs" "autogenerated_1" {
|
||||||
|
access_key = "${var.aws_access_key}"
|
||||||
|
ami_description = "Ubuntu 16.04 LTS - expand root partition"
|
||||||
|
ami_name = "ubuntu-16-04-test-${local.timestamp}"
|
||||||
|
encrypt_boot = true
|
||||||
|
launch_block_device_mappings {
|
||||||
|
delete_on_termination = true
|
||||||
|
device_name = "/dev/sda1"
|
||||||
|
volume_size = 48
|
||||||
|
volume_type = "gp2"
|
||||||
|
}
|
||||||
|
region = "${var.aws_region}"
|
||||||
|
secret_key = "${var.aws_secret_key}"
|
||||||
|
source_ami = "${data.amazon-ami.autogenerated_1.id}"
|
||||||
|
spot_instance_types = ["t2.small", "t2.medium", "t2.large"]
|
||||||
|
spot_price = "0.0075"
|
||||||
|
ssh_interface = "session_manager"
|
||||||
|
ssh_username = "ubuntu"
|
||||||
|
temporary_iam_instance_profile_policy_document {
|
||||||
|
Statement {
|
||||||
|
Action = ["*"]
|
||||||
|
Effect = "Allow"
|
||||||
|
Resource = ["*"]
|
||||||
|
}
|
||||||
|
Version = "2012-10-17"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
source "amazon-ebs" "named_builder" {
|
||||||
|
access_key = "${var.aws_access_key}"
|
||||||
|
ami_description = "Ubuntu 16.04 LTS - expand root partition"
|
||||||
|
ami_name = "ubuntu-16-04-test-${local.timestamp}"
|
||||||
|
encrypt_boot = true
|
||||||
|
launch_block_device_mappings {
|
||||||
|
delete_on_termination = true
|
||||||
|
device_name = "/dev/sda1"
|
||||||
|
volume_size = 48
|
||||||
|
volume_type = "gp2"
|
||||||
|
}
|
||||||
|
region = "${var.aws_region}"
|
||||||
|
secret_key = "${var.aws_secret_key}"
|
||||||
|
source_ami = "${data.amazon-ami.autogenerated_1.id}"
|
||||||
|
spot_instance_types = ["t2.small", "t2.medium", "t2.large"]
|
||||||
|
spot_price = "0.0075"
|
||||||
|
ssh_interface = "session_manager"
|
||||||
|
ssh_username = "ubuntu"
|
||||||
|
temporary_iam_instance_profile_policy_document {
|
||||||
|
Statement {
|
||||||
|
Action = ["*"]
|
||||||
|
Effect = "Allow"
|
||||||
|
Resource = ["*"]
|
||||||
|
}
|
||||||
|
Version = "2012-10-17"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
build {
|
||||||
|
sources = ["source.amazon-ebs.autogenerated_1", "source.amazon-ebs.named_builder"]
|
||||||
|
|
||||||
|
provisioner "shell" {
|
||||||
|
except = ["amazon-ebs"]
|
||||||
|
inline = ["echo ${var.secret_account}", "echo ${build.ID}", "echo ${build.SSHPublicKey} | head -c 14", "echo ${path.root} is not ${path.cwd}", "echo ${packer.version}", "echo ${uuidv4()}"]
|
||||||
|
max_retries = "5"
|
||||||
|
}
|
||||||
|
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo ${local.password}", "echo ${data.amazon-secretsmanager.autogenerated_1.value}", "echo ${local.password_key}", "echo ${data.amazon-secretsmanager.autogenerated_2.value}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo ${data.amazon-secretsmanager.autogenerated_3.value}", "echo ${data.amazon-secretsmanager.autogenerated_4.value}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:38: executing "hcl2_upgrade" at <clean_resource_name>: error calling clean_resource_name: unhandled "clean_resource_name" call:
|
||||||
|
# there is no way to automatically upgrade the "clean_resource_name" call.
|
||||||
|
# Please manually upgrade to use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
|
# Visit https://packer.io/docs/templates/hcl_templates/variables#custom-validation-rules , https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo mybuild-{{isotime | clean_resource_name}}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <lower>: error calling lower: unhandled "lower" call:
|
||||||
|
# there is no way to automatically upgrade the "lower" call.
|
||||||
|
# Please manually upgrade to `lower(var.example)`
|
||||||
|
# Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/lower for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo {{ `SOMETHING` | lower }}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <upper>: error calling upper: unhandled "upper" call:
|
||||||
|
# there is no way to automatically upgrade the "upper" call.
|
||||||
|
# Please manually upgrade to `upper(var.example)`
|
||||||
|
# Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/upper for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo {{ `something` | upper }}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <split `some-string` `-` 0>: error calling split: unhandled "split" call:
|
||||||
|
# there is no way to automatically upgrade the "split" call.
|
||||||
|
# Please manually upgrade to `split(separator, string)`
|
||||||
|
# Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/split for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo {{ split `some-string` `-` 0 }}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace_all `-` `/` build_name>: error calling replace_all: unhandled "replace_all" call:
|
||||||
|
# there is no way to automatically upgrade the "replace_all" call.
|
||||||
|
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
|
# Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo {{ replace_all `-` `/` build_name }}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace `some-string` `-` `/` 1>: error calling replace: unhandled "replace" call:
|
||||||
|
# there is no way to automatically upgrade the "replace" call.
|
||||||
|
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
|
||||||
|
# Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
|
||||||
|
provisioner "shell" {
|
||||||
|
inline = ["echo {{ replace `some-string` `-` `/` 1 }}"]
|
||||||
|
}
|
||||||
|
|
||||||
|
provisioner "shell-local" {
|
||||||
|
inline = ["sleep 100000"]
|
||||||
|
only = ["amazon-ebs"]
|
||||||
|
timeout = "5s"
|
||||||
|
}
|
||||||
|
|
||||||
|
post-processor "amazon-import" {
|
||||||
|
format = "vmdk"
|
||||||
|
license_type = "BYOL"
|
||||||
|
region = "eu-west-3"
|
||||||
|
s3_bucket_name = "hashicorp.adrien"
|
||||||
|
tags = {
|
||||||
|
Description = "packer amazon-import ${local.timestamp}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
post-processors {
|
||||||
|
post-processor "artifice" {
|
||||||
|
keep_input_artifact = true
|
||||||
|
files = ["path/something.ova"]
|
||||||
|
name = "very_special_artifice_post-processor"
|
||||||
|
only = ["amazon-ebs"]
|
||||||
|
}
|
||||||
|
post-processor "amazon-import" {
|
||||||
|
except = ["amazon-ebs"]
|
||||||
|
license_type = "BYOL"
|
||||||
|
s3_bucket_name = "hashicorp.adrien"
|
||||||
|
tags = {
|
||||||
|
Description = "packer amazon-import ${local.timestamp}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,235 @@
|
|||||||
|
{
|
||||||
|
"min_packer_version": "1.6.0",
|
||||||
|
"variables": {
|
||||||
|
"secret_account": "🤷",
|
||||||
|
"aws_region": null,
|
||||||
|
"aws_secondary_region": "{{ env `AWS_DEFAULT_REGION` }}",
|
||||||
|
"aws_secret_key": "",
|
||||||
|
"aws_access_key": "",
|
||||||
|
"password": "{{ aws_secretsmanager `sample/app/password` }}",
|
||||||
|
"password_key": "MY_KEY_{{ aws_secretsmanager `sample/app/passwords` `api_key` }}"
|
||||||
|
},
|
||||||
|
"sensitive-variables": [
|
||||||
|
"aws_secret_key",
|
||||||
|
"aws_access_key",
|
||||||
|
"secret_account",
|
||||||
|
"potato",
|
||||||
|
"password"
|
||||||
|
],
|
||||||
|
"builders": [
|
||||||
|
{
|
||||||
|
"type": "amazon-ebs",
|
||||||
|
"region": "{{ user `aws_region` }}",
|
||||||
|
"secret_key": "{{ user `aws_secret_key` }}",
|
||||||
|
"access_key": "{{ user `aws_access_key` }}",
|
||||||
|
"ami_name": "ubuntu-16-04-test-{{ timestamp }}",
|
||||||
|
"ami_description": "Ubuntu 16.04 LTS - expand root partition",
|
||||||
|
"source_ami_filter": {
|
||||||
|
"filters": {
|
||||||
|
"virtualization-type": "hvm",
|
||||||
|
"name": "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*",
|
||||||
|
"root-device-type": "ebs"
|
||||||
|
},
|
||||||
|
"owners": [
|
||||||
|
"099720109477"
|
||||||
|
],
|
||||||
|
"most_recent": true
|
||||||
|
},
|
||||||
|
"launch_block_device_mappings": [
|
||||||
|
{
|
||||||
|
"delete_on_termination": true,
|
||||||
|
"device_name": "/dev/sda1",
|
||||||
|
"volume_type": "gp2",
|
||||||
|
"volume_size": 48
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"spot_price": "0.0075",
|
||||||
|
"spot_instance_types": [
|
||||||
|
"t2.small",
|
||||||
|
"t2.medium",
|
||||||
|
"t2.large"
|
||||||
|
],
|
||||||
|
"encrypt_boot": true,
|
||||||
|
"ssh_username": "ubuntu",
|
||||||
|
"temporary_iam_instance_profile_policy_document": {
|
||||||
|
"Version": "2012-10-17",
|
||||||
|
"Statement": [
|
||||||
|
{
|
||||||
|
"Effect": "Allow",
|
||||||
|
"Action": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"Resource": ["*"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ssh_interface": "session_manager"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "amazon-ebs",
|
||||||
|
"name": "named_builder",
|
||||||
|
"region": "{{ user `aws_region` }}",
|
||||||
|
"secret_key": "{{ user `aws_secret_key` }}",
|
||||||
|
"access_key": "{{ user `aws_access_key` }}",
|
||||||
|
"ami_name": "ubuntu-16-04-test-{{ timestamp }}",
|
||||||
|
"ami_description": "Ubuntu 16.04 LTS - expand root partition",
|
||||||
|
"source_ami_filter": {
|
||||||
|
"filters": {
|
||||||
|
"virtualization-type": "hvm",
|
||||||
|
"name": "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*",
|
||||||
|
"root-device-type": "ebs"
|
||||||
|
},
|
||||||
|
"owners": [
|
||||||
|
"099720109477"
|
||||||
|
],
|
||||||
|
"most_recent": true
|
||||||
|
},
|
||||||
|
"launch_block_device_mappings": [
|
||||||
|
{
|
||||||
|
"delete_on_termination": true,
|
||||||
|
"device_name": "/dev/sda1",
|
||||||
|
"volume_type": "gp2",
|
||||||
|
"volume_size": 48
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"spot_price": "0.0075",
|
||||||
|
"spot_instance_types": [
|
||||||
|
"t2.small",
|
||||||
|
"t2.medium",
|
||||||
|
"t2.large"
|
||||||
|
],
|
||||||
|
"encrypt_boot": true,
|
||||||
|
"ssh_username": "ubuntu",
|
||||||
|
"temporary_iam_instance_profile_policy_document": {
|
||||||
|
"Version": "2012-10-17",
|
||||||
|
"Statement": [
|
||||||
|
{
|
||||||
|
"Effect": "Allow",
|
||||||
|
"Action": [
|
||||||
|
"*"
|
||||||
|
],
|
||||||
|
"Resource": ["*"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ssh_interface": "session_manager"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"provisioners": [
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"except": [
|
||||||
|
"amazon-ebs"
|
||||||
|
],
|
||||||
|
"max_retries": 5,
|
||||||
|
"inline": [
|
||||||
|
"echo {{ user `secret_account` }}",
|
||||||
|
"echo {{ build `ID` }}",
|
||||||
|
"echo {{ build `SSHPublicKey` }} | head -c 14",
|
||||||
|
"echo {{ template_dir }} is not {{ pwd }}",
|
||||||
|
"echo {{ packer_version }}",
|
||||||
|
"echo {{ uuid }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ user `password` }}",
|
||||||
|
"echo {{ aws_secretsmanager `sample/app/password` }}",
|
||||||
|
"echo {{ user `password_key` }}",
|
||||||
|
"echo {{ aws_secretsmanager `sample/app/passwords` `api_key` }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ aws_secretsmanager `some_secret` }}",
|
||||||
|
"echo {{ aws_secretsmanager `some_secret` `with_key` }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo mybuild-{{isotime | clean_resource_name}}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ `SOMETHING` | lower }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ `something` | upper }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ split `some-string` `-` 0 }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ replace_all `-` `/` build_name }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"inline": [
|
||||||
|
"echo {{ replace `some-string` `-` `/` 1 }}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "shell-local",
|
||||||
|
"only": [
|
||||||
|
"amazon-ebs"
|
||||||
|
],
|
||||||
|
"timeout": "5s",
|
||||||
|
"inline": [
|
||||||
|
"sleep 100000"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"post-processors": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"type": "amazon-import",
|
||||||
|
"region": "eu-west-3",
|
||||||
|
"s3_bucket_name": "hashicorp.adrien",
|
||||||
|
"license_type": "BYOL",
|
||||||
|
"format": "vmdk",
|
||||||
|
"tags": {
|
||||||
|
"Description": "packer amazon-import {{timestamp}}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"only": [
|
||||||
|
"amazon-ebs"
|
||||||
|
],
|
||||||
|
"files": [
|
||||||
|
"path/something.ova"
|
||||||
|
],
|
||||||
|
"keep_input_artifact": true,
|
||||||
|
"name": "very_special_artifice_post-processor",
|
||||||
|
"type": "artifice"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"except": [
|
||||||
|
"amazon-ebs"
|
||||||
|
],
|
||||||
|
"type": "amazon-import",
|
||||||
|
"s3_bucket_name": "hashicorp.adrien",
|
||||||
|
"license_type": "BYOL",
|
||||||
|
"tags": {
|
||||||
|
"Description": "packer amazon-import {{timestamp}}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
@ -78,3 +78,4 @@ working on improving this part of the transformer.
|
|||||||
|
|
||||||
- `-output-file` - File where to put the hcl2 generated config. Defaults to
|
- `-output-file` - File where to put the hcl2 generated config. Defaults to
|
||||||
JSON_TEMPLATE.pkr.hcl
|
JSON_TEMPLATE.pkr.hcl
|
||||||
|
- `-with-annotations` - Adds helper annotations with information about the generated HCL2 blocks.
|
||||||
|
Loading…
x
Reference in New Issue
Block a user