add with-annotations flag to hcl2_upgrade

This commit is contained in:
sylviamoss 2021-02-12 15:18:53 +01:00
parent c5fca1f876
commit 76e3d57cae
7 changed files with 1002 additions and 283 deletions

View File

@ -145,6 +145,7 @@ type InspectArgs struct {
func (va *HCL2UpgradeArgs) AddFlagSets(flags *flag.FlagSet) {
flags.StringVar(&va.OutputFile, "output-file", "", "File where to put the hcl2 generated config. Defaults to JSON_TEMPLATE.pkr.hcl")
flags.BoolVar(&va.WithAnnotations, "with-annotations", false, "Add to the generated file helper annotations about the generated HCL2 blocks.")
va.MetaArgs.AddFlagSets(flags)
}
@ -152,7 +153,8 @@ func (va *HCL2UpgradeArgs) AddFlagSets(flags *flag.FlagSet) {
// HCL2UpgradeArgs represents a parsed cli line for a `packer hcl2_upgrade`
type HCL2UpgradeArgs struct {
MetaArgs
OutputFile string
OutputFile string
WithAnnotations bool
}
func (va *FormatArgs) AddFlagSets(flags *flag.FlagSet) {

View File

@ -4,6 +4,7 @@ import (
"bytes"
"context"
"fmt"
"github.com/hashicorp/packer/packer"
"io"
"os"
"path/filepath"
@ -94,7 +95,6 @@ const (
# a build block invokes sources and runs provisioning steps on them. The
# documentation for build blocks can be found here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/build
build {
`
amazonAmiDataHeader = `
@ -120,6 +120,11 @@ var (
timestamp = false
)
type BlockParser interface {
Parse(*template.Template) error
Write(*bytes.Buffer)
}
func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs) int {
var output io.Writer
if err := os.MkdirAll(filepath.Dir(cla.OutputFile), 0); err != nil {
@ -134,9 +139,11 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
return 1
}
if _, err := output.Write([]byte(hcl2UpgradeFileHeader)); err != nil {
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
return 1
if cla.WithAnnotations {
if _, err := output.Write([]byte(hcl2UpgradeFileHeader)); err != nil {
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
return 1
}
}
hdl, ret := c.GetConfigFromJSON(&cla.MetaArgs)
@ -150,67 +157,31 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
}
tpl := core.Template
// OutPut Locals and Local blocks
localsContent := hclwrite.NewEmptyFile()
localsBody := localsContent.Body()
localsBody.AppendNewline()
localBody := localsBody.AppendNewBlock("locals", nil).Body()
// Parse blocks
localsOut := []byte{}
// Output variables section
variablesOut := []byte{}
variables := []*template.Variable{}
{
// sort variables to avoid map's randomness
for _, variable := range tpl.Variables {
variables = append(variables, variable)
}
sort.Slice(variables, func(i, j int) bool {
return variables[i].Key < variables[j].Key
})
packerBlock := &PackerParser{
withAnnotations: cla.WithAnnotations,
}
if err := packerBlock.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
hasLocals := false
for _, variable := range variables {
variablesContent := hclwrite.NewEmptyFile()
variablesBody := variablesContent.Body()
variablesBody.AppendNewline()
variableBody := variablesBody.AppendNewBlock("variable", []string{variable.Key}).Body()
variableBody.SetAttributeRaw("type", hclwrite.Tokens{&hclwrite.Token{Bytes: []byte("string")}})
if variable.Default != "" || !variable.Required {
variableBody.SetAttributeValue("default", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
}
sensitive := false
if isSensitiveVariable(variable.Key, tpl.SensitiveVariables) {
sensitive = true
variableBody.SetAttributeValue("sensitive", cty.BoolVal(true))
}
isLocal, out := variableTransposeTemplatingCalls(variablesContent.Bytes())
if isLocal {
if sensitive {
// Create Local block because this is sensitive
localContent := hclwrite.NewEmptyFile()
body := localContent.Body()
body.AppendNewline()
localBody := body.AppendNewBlock("local", []string{variable.Key}).Body()
localBody.SetAttributeValue("sensitive", cty.BoolVal(true))
localBody.SetAttributeValue("expression", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
localsOut = append(localsOut, transposeTemplatingCalls(localContent.Bytes())...)
localsVariableMap[variable.Key] = "local"
continue
}
localBody.SetAttributeValue(variable.Key, hcl2shim.HCL2ValueFromConfigValue(variable.Default))
localsVariableMap[variable.Key] = "locals"
hasLocals = true
continue
}
variablesOut = append(variablesOut, out...)
variables := &VariableParser{
withAnnotations: cla.WithAnnotations,
}
if err := variables.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
if hasLocals {
localsOut = append(localsOut, transposeTemplatingCalls(localsContent.Bytes())...)
locals := &LocalsParser{
LocalsOut: variables.localsOut,
withAnnotations: cla.WithAnnotations,
}
if err := locals.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
builders := []*template.Builder{}
@ -221,9 +192,12 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
}
}
// Output amazon-ami data source section
amazonAmiOut, err := c.writeAmazonAmiDatasource(builders)
if err != nil {
amazonAmiDatasource := &AmazonAmiDatasourceParser{
builders: builders,
withAnnotations: cla.WithAnnotations,
}
if err := amazonAmiDatasource.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
@ -231,238 +205,58 @@ func (c *HCL2UpgradeCommand) RunContext(_ context.Context, cla *HCL2UpgradeArgs)
return builders[i].Type+builders[i].Name < builders[j].Type+builders[j].Name
})
// Output sources section
sourcesOut := []byte{}
for i, builderCfg := range builders {
sourcesContent := hclwrite.NewEmptyFile()
body := sourcesContent.Body()
body.AppendNewline()
if !c.Meta.CoreConfig.Components.PluginConfig.Builders.Has(builderCfg.Type) {
c.Ui.Error(fmt.Sprintf("unknown builder type: %q\n", builderCfg.Type))
return 1
}
if builderCfg.Name == "" || builderCfg.Name == builderCfg.Type {
builderCfg.Name = fmt.Sprintf("autogenerated_%d", i+1)
}
builderCfg.Name = strings.ReplaceAll(strings.TrimSpace(builderCfg.Name), " ", "_")
sourceBody := body.AppendNewBlock("source", []string{builderCfg.Type, builderCfg.Name}).Body()
jsonBodyToHCL2Body(sourceBody, builderCfg.Config)
sourcesOut = append(sourcesOut, transposeTemplatingCalls(sourcesContent.Bytes())...)
sources := &SourceParser{
Builders: builders,
BuilderPlugins: c.Meta.CoreConfig.Components.PluginConfig.Builders,
withAnnotations: cla.WithAnnotations,
}
if err := sources.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
// Output build section
buildContent := hclwrite.NewEmptyFile()
buildBody := buildContent.Body()
if tpl.Description != "" {
buildBody.SetAttributeValue("description", cty.StringVal(tpl.Description))
buildBody.AppendNewline()
build := &BuildParser{
Builders: builders,
withAnnotations: cla.WithAnnotations,
}
if err := build.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
sourceNames := []string{}
for _, builder := range builders {
sourceNames = append(sourceNames, fmt.Sprintf("source.%s.%s", builder.Type, builder.Name))
amazonSecretsDatasource := &AmazonSecretsDatasourceParser{
withAnnotations: cla.WithAnnotations,
}
buildBody.SetAttributeValue("sources", hcl2shim.HCL2ValueFromConfigValue(sourceNames))
buildBody.AppendNewline()
buildOut := buildContent.Bytes()
// Output provisioners section
provisionersOut := []byte{}
for _, provisioner := range tpl.Provisioners {
buildBody.AppendNewline()
contentBytes := c.writeProvisioner("provisioner", provisioner)
provisionersOut = append(provisionersOut, transposeTemplatingCalls(contentBytes)...)
}
if tpl.CleanupProvisioner != nil {
buildBody.AppendNewline()
contentBytes := c.writeProvisioner("error-cleanup-provisioner", tpl.CleanupProvisioner)
provisionersOut = append(provisionersOut, transposeTemplatingCalls(contentBytes)...)
}
// Output post-processors section
postProcessorsOut := []byte{}
for _, pps := range tpl.PostProcessors {
postProcessorContent := hclwrite.NewEmptyFile()
body := postProcessorContent.Body()
switch len(pps) {
case 0:
continue
case 1:
default:
body = body.AppendNewBlock("post-processors", nil).Body()
}
for _, pp := range pps {
ppBody := body.AppendNewBlock("post-processor", []string{pp.Type}).Body()
if pp.KeepInputArtifact != nil {
ppBody.SetAttributeValue("keep_input_artifact", cty.BoolVal(*pp.KeepInputArtifact))
}
cfg := pp.Config
if len(pp.Except) > 0 {
cfg["except"] = pp.Except
}
if len(pp.Only) > 0 {
cfg["only"] = pp.Only
}
if pp.Name != "" && pp.Name != pp.Type {
cfg["name"] = pp.Name
}
jsonBodyToHCL2Body(ppBody, cfg)
}
postProcessorsOut = append(postProcessorsOut, transposeTemplatingCalls(postProcessorContent.Bytes())...)
}
// Output amazon-secretsmanager data source section
keys := make([]string, 0, len(amazonSecretsManagerMap))
for k := range amazonSecretsManagerMap {
keys = append(keys, k)
}
sort.Strings(keys)
amazonSecretsDataOut := []byte{}
for _, dataSourceName := range keys {
datasourceContent := hclwrite.NewEmptyFile()
body := datasourceContent.Body()
body.AppendNewline()
datasourceBody := body.AppendNewBlock("data", []string{"amazon-secretsmanager", dataSourceName}).Body()
jsonBodyToHCL2Body(datasourceBody, amazonSecretsManagerMap[dataSourceName])
amazonSecretsDataOut = append(amazonSecretsDataOut, datasourceContent.Bytes()...)
if err := amazonSecretsDatasource.Parse(tpl); err != nil {
c.Ui.Error(err.Error())
return 1
}
// Write file
out := &bytes.Buffer{}
// Packer section
if tpl.MinVersion != "" {
out.Write([]byte(packerBlockHeader))
fileContent := hclwrite.NewEmptyFile()
body := fileContent.Body()
packerBody := body.AppendNewBlock("packer", nil).Body()
packerBody.SetAttributeValue("required_version", cty.StringVal(fmt.Sprintf(">= %s", tpl.MinVersion)))
out.Write(fileContent.Bytes())
blocks := map[int]BlockParser{
1: packerBlock,
2: variables,
3: amazonSecretsDatasource,
4: amazonAmiDatasource,
5: locals,
6: sources,
7: build,
}
for i := 1; i <= len(blocks); i++ {
blocks[i].Write(out)
}
if len(variablesOut) > 0 {
out.Write([]byte(inputVarHeader))
out.Write(variablesOut)
if _, err := output.Write(hclwrite.Format(out.Bytes())); err != nil {
c.Ui.Error(fmt.Sprintf("Failed to write to file: %v", err))
return 1
}
if len(amazonSecretsManagerMap) > 0 {
out.Write([]byte(amazonSecretsManagerDataHeader))
out.Write(amazonSecretsDataOut)
}
if len(amazonAmiOut) > 0 {
out.Write([]byte(amazonAmiDataHeader))
out.Write(amazonAmiOut)
}
if timestamp {
_, _ = out.Write([]byte("\n"))
fmt.Fprintln(out, `# "timestamp" template function replacement`)
fmt.Fprintln(out, `locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }`)
}
if len(localsOut) > 0 {
out.Write([]byte(localsVarHeader))
out.Write(localsOut)
}
out.Write([]byte(sourcesHeader))
out.Write(sourcesOut)
out.Write([]byte(buildHeader))
out.Write(buildOut)
out.Write(provisionersOut)
out.Write(postProcessorsOut)
_, _ = out.Write([]byte("}\n"))
_, _ = output.Write(hclwrite.Format(out.Bytes()))
c.Ui.Say(fmt.Sprintf("Successfully created %s ", cla.OutputFile))
return 0
}
func (c *HCL2UpgradeCommand) writeProvisioner(typeName string, provisioner *template.Provisioner) []byte {
provisionerContent := hclwrite.NewEmptyFile()
body := provisionerContent.Body()
block := body.AppendNewBlock(typeName, []string{provisioner.Type})
cfg := provisioner.Config
if len(provisioner.Except) > 0 {
cfg["except"] = provisioner.Except
}
if len(provisioner.Only) > 0 {
cfg["only"] = provisioner.Only
}
if provisioner.MaxRetries != "" {
cfg["max_retries"] = provisioner.MaxRetries
}
if provisioner.Timeout > 0 {
cfg["timeout"] = provisioner.Timeout.String()
}
jsonBodyToHCL2Body(block.Body(), cfg)
return provisionerContent.Bytes()
}
func (c *HCL2UpgradeCommand) writeAmazonAmiDatasource(builders []*template.Builder) ([]byte, error) {
amazonAmiOut := []byte{}
amazonAmiFilters := []map[string]interface{}{}
i := 1
for _, builder := range builders {
if strings.HasPrefix(builder.Type, "amazon-") {
if sourceAmiFilter, ok := builder.Config["source_ami_filter"]; ok {
sourceAmiFilterCfg := map[string]interface{}{}
if err := mapstructure.Decode(sourceAmiFilter, &sourceAmiFilterCfg); err != nil {
c.Ui.Error(fmt.Sprintf("Failed to write amazon-ami data source: %v", err))
return nil, err
}
duplicate := false
dataSourceName := fmt.Sprintf("autogenerated_%d", i)
for j, filter := range amazonAmiFilters {
if reflect.DeepEqual(filter, sourceAmiFilter) {
duplicate = true
dataSourceName = fmt.Sprintf("autogenerated_%d", j+1)
continue
}
}
// This is a hack...
// Use templating so that it could be correctly transformed later into a data resource
sourceAmiDataRef := fmt.Sprintf("{{ data `amazon-ami.%s.id` }}", dataSourceName)
if duplicate {
delete(builder.Config, "source_ami_filter")
builder.Config["source_ami"] = sourceAmiDataRef
continue
}
amazonAmiFilters = append(amazonAmiFilters, sourceAmiFilterCfg)
delete(builder.Config, "source_ami_filter")
builder.Config["source_ami"] = sourceAmiDataRef
i++
datasourceContent := hclwrite.NewEmptyFile()
body := datasourceContent.Body()
body.AppendNewline()
sourceBody := body.AppendNewBlock("data", []string{"amazon-ami", dataSourceName}).Body()
jsonBodyToHCL2Body(sourceBody, sourceAmiFilterCfg)
amazonAmiOut = append(amazonAmiOut, transposeTemplatingCalls(datasourceContent.Bytes())...)
}
}
}
return amazonAmiOut, nil
}
type UnhandleableArgumentError struct {
Call string
Correspondance string
@ -789,3 +583,443 @@ func (*HCL2UpgradeCommand) AutocompleteArgs() complete.Predictor {
func (*HCL2UpgradeCommand) AutocompleteFlags() complete.Flags {
return complete.Flags{}
}
// Specific blocks parser

// PackerParser renders the `packer { required_version = ... }` settings
// block when the JSON template declares a minimum Packer version.
type PackerParser struct {
	out             []byte
	withAnnotations bool
}

// Parse captures tpl.MinVersion, when set, as a rendered `packer` block.
func (p *PackerParser) Parse(tpl *template.Template) error {
	if tpl.MinVersion == "" {
		return nil
	}
	content := hclwrite.NewEmptyFile()
	packerBody := content.Body().AppendNewBlock("packer", nil).Body()
	requiredVersion := fmt.Sprintf(">= %s", tpl.MinVersion)
	packerBody.SetAttributeValue("required_version", cty.StringVal(requiredVersion))
	p.out = content.Bytes()
	return nil
}

// Write emits the rendered block, preceded by its annotation header when
// the user asked for annotations.
func (p *PackerParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.withAnnotations {
		out.Write([]byte(packerBlockHeader))
	}
	out.Write(p.out)
}
// VariableParser converts the JSON template's `variables` section into HCL2
// `variable` blocks. Variables whose defaults require templating calls are
// diverted into locals instead (rendered into localsOut and later written by
// LocalsParser).
type VariableParser struct {
	variablesOut    []byte
	localsOut       []byte
	withAnnotations bool
}

// Parse renders every template variable in sorted-key order. Sensitive
// templated variables become individual `local` blocks; non-sensitive
// templated ones are gathered into one shared `locals` block; everything
// else becomes a `variable` block appended to variablesOut.
func (p *VariableParser) Parse(tpl *template.Template) error {
	// OutPut Locals and Local blocks
	localsContent := hclwrite.NewEmptyFile()
	localsBody := localsContent.Body()
	localsBody.AppendNewline()
	localBody := localsBody.AppendNewBlock("locals", nil).Body()
	if len(p.variablesOut) == 0 {
		p.variablesOut = []byte{}
	}
	if len(p.localsOut) == 0 {
		p.localsOut = []byte{}
	}
	variables := []*template.Variable{}
	{
		// sort variables to avoid map's randomness
		for _, variable := range tpl.Variables {
			variables = append(variables, variable)
		}
		sort.Slice(variables, func(i, j int) bool {
			return variables[i].Key < variables[j].Key
		})
	}
	hasLocals := false
	for _, variable := range variables {
		variablesContent := hclwrite.NewEmptyFile()
		variablesBody := variablesContent.Body()
		variablesBody.AppendNewline()
		variableBody := variablesBody.AppendNewBlock("variable", []string{variable.Key}).Body()
		variableBody.SetAttributeRaw("type", hclwrite.Tokens{&hclwrite.Token{Bytes: []byte("string")}})
		if variable.Default != "" || !variable.Required {
			variableBody.SetAttributeValue("default", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
		}
		sensitive := false
		if isSensitiveVariable(variable.Key, tpl.SensitiveVariables) {
			sensitive = true
			variableBody.SetAttributeValue("sensitive", cty.BoolVal(true))
		}
		// isLocal presumably signals that the default contained templating
		// calls and therefore cannot live in a `variable` block — confirm
		// against variableTransposeTemplatingCalls.
		isLocal, out := variableTransposeTemplatingCalls(variablesContent.Bytes())
		if isLocal {
			if sensitive {
				// Create Local block because this is sensitive
				localContent := hclwrite.NewEmptyFile()
				body := localContent.Body()
				body.AppendNewline()
				localBody := body.AppendNewBlock("local", []string{variable.Key}).Body()
				localBody.SetAttributeValue("sensitive", cty.BoolVal(true))
				localBody.SetAttributeValue("expression", hcl2shim.HCL2ValueFromConfigValue(variable.Default))
				p.localsOut = append(p.localsOut, transposeTemplatingCalls(localContent.Bytes())...)
				// Record where this key ended up so later references resolve
				// to `local.<key>`.
				localsVariableMap[variable.Key] = "local"
				continue
			}
			// Non-sensitive templated value: add it to the shared `locals`
			// block built above.
			localBody.SetAttributeValue(variable.Key, hcl2shim.HCL2ValueFromConfigValue(variable.Default))
			localsVariableMap[variable.Key] = "locals"
			hasLocals = true
			continue
		}
		p.variablesOut = append(p.variablesOut, out...)
	}
	if hasLocals {
		p.localsOut = append(p.localsOut, transposeTemplatingCalls(localsContent.Bytes())...)
	}
	return nil
}

// Write emits only the `variable` blocks; the locals collected during Parse
// are written by LocalsParser.
func (p *VariableParser) Write(out *bytes.Buffer) {
	if len(p.variablesOut) > 0 {
		if p.withAnnotations {
			out.Write([]byte(inputVarHeader))
		}
		out.Write(p.variablesOut)
	}
}
// LocalsParser holds the locals rendered during variable parsing and writes
// them out, together with the generated `timestamp` local when needed.
type LocalsParser struct {
	LocalsOut       []byte
	withAnnotations bool
}

// Parse is a no-op: locals are produced as a side effect of VariableParser.
func (p *LocalsParser) Parse(tpl *template.Template) error {
	return nil
}

// Write emits the `timestamp` replacement local (when the package-level
// timestamp flag was set during parsing) followed by the collected locals.
func (p *LocalsParser) Write(out *bytes.Buffer) {
	if timestamp {
		_, _ = out.Write([]byte("\n"))
		if p.withAnnotations {
			fmt.Fprintln(out, `# "timestamp" template function replacement`)
		}
		fmt.Fprintln(out, `locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }`)
	}
	if len(p.LocalsOut) == 0 {
		return
	}
	if p.withAnnotations {
		out.Write([]byte(localsVarHeader))
	}
	out.Write(p.LocalsOut)
}
// AmazonSecretsDatasourceParser renders one `data "amazon-secretsmanager"`
// block per entry recorded in the package-level amazonSecretsManagerMap.
type AmazonSecretsDatasourceParser struct {
	out             []byte
	withAnnotations bool
}

// Parse renders the data sources in sorted-name order so output is stable
// across runs (map iteration order is random).
func (p *AmazonSecretsDatasourceParser) Parse(_ *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	names := make([]string, 0, len(amazonSecretsManagerMap))
	for name := range amazonSecretsManagerMap {
		names = append(names, name)
	}
	sort.Strings(names)
	for _, name := range names {
		file := hclwrite.NewEmptyFile()
		fileBody := file.Body()
		fileBody.AppendNewline()
		dataBody := fileBody.AppendNewBlock("data", []string{"amazon-secretsmanager", name}).Body()
		jsonBodyToHCL2Body(dataBody, amazonSecretsManagerMap[name])
		p.out = append(p.out, file.Bytes()...)
	}
	return nil
}

// Write emits the rendered data sources, optionally preceded by the
// annotation header.
func (p *AmazonSecretsDatasourceParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.withAnnotations {
		out.Write([]byte(amazonSecretsManagerDataHeader))
	}
	out.Write(p.out)
}
// AmazonAmiDatasourceParser converts each amazon-* builder's
// `source_ami_filter` into a `data "amazon-ami"` block and rewrites the
// builder to reference that data source through `source_ami`.
type AmazonAmiDatasourceParser struct {
	out             []byte
	builders        []*template.Builder
	withAnnotations bool
}

// Parse scans p.builders for amazon-* types carrying a `source_ami_filter`,
// emits one `autogenerated_<n>` data source per distinct filter, and points
// every builder that used the filter at the generated data source.
func (p *AmazonAmiDatasourceParser) Parse(_ *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	amazonAmiFilters := []map[string]interface{}{}
	// i numbers the generated data sources; only incremented for new filters.
	i := 1
	for _, builder := range p.builders {
		if strings.HasPrefix(builder.Type, "amazon-") {
			if sourceAmiFilter, ok := builder.Config["source_ami_filter"]; ok {
				sourceAmiFilterCfg := map[string]interface{}{}
				if err := mapstructure.Decode(sourceAmiFilter, &sourceAmiFilterCfg); err != nil {
					return fmt.Errorf("Failed to write amazon-ami data source: %v", err)
				}
				duplicate := false
				dataSourceName := fmt.Sprintf("autogenerated_%d", i)
				// Reuse a previously generated data source when the same
				// filter was already seen.
				// NOTE(review): this compares the stored *decoded* filters
				// against the *raw* config value, so a match can only occur
				// when the raw value is already a map[string]interface{} —
				// confirm this is intended.
				for j, filter := range amazonAmiFilters {
					if reflect.DeepEqual(filter, sourceAmiFilter) {
						duplicate = true
						dataSourceName = fmt.Sprintf("autogenerated_%d", j+1)
						continue
					}
				}
				// This is a hack...
				// Use templating so that it could be correctly transformed later into a data resource
				sourceAmiDataRef := fmt.Sprintf("{{ data `amazon-ami.%s.id` }}", dataSourceName)
				if duplicate {
					// Point at the existing data source; nothing new to emit.
					delete(builder.Config, "source_ami_filter")
					builder.Config["source_ami"] = sourceAmiDataRef
					continue
				}
				amazonAmiFilters = append(amazonAmiFilters, sourceAmiFilterCfg)
				delete(builder.Config, "source_ami_filter")
				builder.Config["source_ami"] = sourceAmiDataRef
				i++
				datasourceContent := hclwrite.NewEmptyFile()
				body := datasourceContent.Body()
				body.AppendNewline()
				sourceBody := body.AppendNewBlock("data", []string{"amazon-ami", dataSourceName}).Body()
				jsonBodyToHCL2Body(sourceBody, sourceAmiFilterCfg)
				p.out = append(p.out, transposeTemplatingCalls(datasourceContent.Bytes())...)
			}
		}
	}
	return nil
}

// Write emits the rendered data sources, optionally preceded by the
// annotation header.
func (p *AmazonAmiDatasourceParser) Write(out *bytes.Buffer) {
	if len(p.out) > 0 {
		if p.withAnnotations {
			out.Write([]byte(amazonAmiDataHeader))
		}
		out.Write(p.out)
	}
}
// SourceParser renders each JSON builder as an HCL2 `source` block, naming
// anonymous builders `autogenerated_<n>` and sanitizing names into valid
// HCL2 labels.
type SourceParser struct {
	Builders        []*template.Builder
	BuilderPlugins  packer.BuilderSet
	out             []byte
	withAnnotations bool
}

// Parse renders every builder in p.Builders into p.out. It fails on any
// builder type unknown to the loaded plugin set.
func (p *SourceParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	for i, builderCfg := range p.Builders {
		sourcesContent := hclwrite.NewEmptyFile()
		body := sourcesContent.Body()
		body.AppendNewline()
		if !p.BuilderPlugins.Has(builderCfg.Type) {
			// Fixed: the error string previously ended with "\n"; error
			// strings must not end with a newline (go vet / staticcheck).
			return fmt.Errorf("unknown builder type: %q", builderCfg.Type)
		}
		if builderCfg.Name == "" || builderCfg.Name == builderCfg.Type {
			// Anonymous (or type-named) builders get a generated, stable name.
			builderCfg.Name = fmt.Sprintf("autogenerated_%d", i+1)
		}
		// HCL2 block labels cannot contain spaces.
		builderCfg.Name = strings.ReplaceAll(strings.TrimSpace(builderCfg.Name), " ", "_")
		sourceBody := body.AppendNewBlock("source", []string{builderCfg.Type, builderCfg.Name}).Body()
		jsonBodyToHCL2Body(sourceBody, builderCfg.Config)
		p.out = append(p.out, transposeTemplatingCalls(sourcesContent.Bytes())...)
	}
	return nil
}

// Write emits the rendered sources, optionally preceded by the annotation
// header.
func (p *SourceParser) Write(out *bytes.Buffer) {
	if len(p.out) > 0 {
		if p.withAnnotations {
			out.Write([]byte(sourcesHeader))
		}
		out.Write(p.out)
	}
}
// BuildParser renders the `build` block: description, source references,
// and — via nested parsers — the provisioners and post-processors.
type BuildParser struct {
	Builders        []*template.Builder
	provisioners    BlockParser
	postProcessors  BlockParser
	out             []byte
	withAnnotations bool
}

// Parse renders the build body attributes and delegates provisioner and
// post-processor rendering to their dedicated parsers.
func (p *BuildParser) Parse(tpl *template.Template) error {
	content := hclwrite.NewEmptyFile()
	body := content.Body()
	if tpl.Description != "" {
		body.SetAttributeValue("description", cty.StringVal(tpl.Description))
		body.AppendNewline()
	}
	refs := []string{}
	for _, builder := range p.Builders {
		refs = append(refs, fmt.Sprintf("source.%s.%s", builder.Type, builder.Name))
	}
	body.SetAttributeValue("sources", hcl2shim.HCL2ValueFromConfigValue(refs))
	body.AppendNewline()
	p.out = content.Bytes()

	p.provisioners = &ProvisionerParser{
		withAnnotations: p.withAnnotations,
	}
	if err := p.provisioners.Parse(tpl); err != nil {
		return err
	}
	p.postProcessors = &PostProcessorParser{
		withAnnotations: p.withAnnotations,
	}
	return p.postProcessors.Parse(tpl)
}

// Write wraps the rendered body, provisioners, and post-processors in a
// `build { ... }` block, preceded by the annotation header or, without
// annotations, a blank separator line.
func (p *BuildParser) Write(out *bytes.Buffer) {
	if len(p.out) == 0 {
		return
	}
	if p.withAnnotations {
		out.Write([]byte(buildHeader))
	} else {
		_, _ = out.Write([]byte("\n"))
	}
	_, _ = out.Write([]byte("build {\n"))
	out.Write(p.out)
	p.provisioners.Write(out)
	p.postProcessors.Write(out)
	_, _ = out.Write([]byte("}\n"))
}
// ProvisionerParser renders the template's provisioners (and the optional
// error-cleanup provisioner) as HCL2 blocks.
type ProvisionerParser struct {
	out             []byte
	withAnnotations bool
}

// Parse renders every provisioner, then the cleanup provisioner if present.
func (p *ProvisionerParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	for _, prov := range tpl.Provisioners {
		rendered := writeProvisioner("provisioner", prov)
		p.out = append(p.out, transposeTemplatingCalls(rendered)...)
	}
	if cleanup := tpl.CleanupProvisioner; cleanup != nil {
		rendered := writeProvisioner("error-cleanup-provisioner", cleanup)
		p.out = append(p.out, transposeTemplatingCalls(rendered)...)
	}
	return nil
}

// writeProvisioner renders a single provisioner as a block of the given type
// name, folding except/only/max_retries/timeout into its config map.
func writeProvisioner(typeName string, provisioner *template.Provisioner) []byte {
	file := hclwrite.NewEmptyFile()
	fileBody := file.Body()
	block := fileBody.AppendNewBlock(typeName, []string{provisioner.Type})
	cfg := provisioner.Config
	if len(provisioner.Except) > 0 {
		cfg["except"] = provisioner.Except
	}
	if len(provisioner.Only) > 0 {
		cfg["only"] = provisioner.Only
	}
	if provisioner.MaxRetries != "" {
		cfg["max_retries"] = provisioner.MaxRetries
	}
	if provisioner.Timeout > 0 {
		cfg["timeout"] = provisioner.Timeout.String()
	}
	fileBody.AppendNewline()
	jsonBodyToHCL2Body(block.Body(), cfg)
	return file.Bytes()
}

// Write emits the rendered provisioners.
func (p *ProvisionerParser) Write(out *bytes.Buffer) {
	if len(p.out) > 0 {
		out.Write(p.out)
	}
}
// PostProcessorParser renders the template's post-processor chains. A chain
// of more than one is nested inside a `post-processors` block.
type PostProcessorParser struct {
	out             []byte
	withAnnotations bool
}

// Parse renders every post-processor chain into p.out.
func (p *PostProcessorParser) Parse(tpl *template.Template) error {
	if p.out == nil {
		p.out = []byte{}
	}
	for _, pps := range tpl.PostProcessors {
		if len(pps) == 0 {
			continue
		}
		file := hclwrite.NewEmptyFile()
		parent := file.Body()
		if len(pps) > 1 {
			// Chained post-processors share one wrapping block.
			parent = parent.AppendNewBlock("post-processors", nil).Body()
		}
		for _, pp := range pps {
			ppBody := parent.AppendNewBlock("post-processor", []string{pp.Type}).Body()
			if pp.KeepInputArtifact != nil {
				ppBody.SetAttributeValue("keep_input_artifact", cty.BoolVal(*pp.KeepInputArtifact))
			}
			cfg := pp.Config
			if len(pp.Except) > 0 {
				cfg["except"] = pp.Except
			}
			if len(pp.Only) > 0 {
				cfg["only"] = pp.Only
			}
			if pp.Name != "" && pp.Name != pp.Type {
				cfg["name"] = pp.Name
			}
			jsonBodyToHCL2Body(ppBody, cfg)
		}
		p.out = append(p.out, transposeTemplatingCalls(file.Bytes())...)
	}
	return nil
}

// Write emits the rendered post-processors.
func (p *PostProcessorParser) Write(out *bytes.Buffer) {
	if len(p.out) > 0 {
		out.Write(p.out)
	}
}

View File

@ -18,11 +18,13 @@ func Test_hcl2_upgrade(t *testing.T) {
tc := []struct {
folder string
flags []string
}{
{"complete"},
{"minimal"},
{"source-name"},
{"error-cleanup-provisioner"},
{folder: "complete", flags: []string{"-with-annotations"}},
{folder: "without-annotations", flags: []string{}},
{folder: "minimal", flags: []string{"-with-annotations"}},
{folder: "source-name", flags: []string{"-with-annotations"}},
{folder: "error-cleanup-provisioner", flags: []string{"-with-annotations"}},
}
for _, tc := range tc {
@ -30,7 +32,12 @@ func Test_hcl2_upgrade(t *testing.T) {
inputPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "input.json"))
outputPath := inputPath + ".pkr.hcl"
expectedPath := filepath.Join(testFixture("hcl2_upgrade", tc.folder, "expected.pkr.hcl"))
p := helperCommand(t, "hcl2_upgrade", inputPath)
args := []string{"hcl2_upgrade"}
if len(tc.flags) > 0 {
args = append(args, tc.flags...)
}
args = append(args, inputPath)
p := helperCommand(t, args...)
bs, err := p.CombinedOutput()
if err != nil {
t.Fatalf("%v %s", err, bs)

View File

@ -174,13 +174,16 @@ build {
inline = ["echo ${var.secret_account}", "echo ${build.ID}", "echo ${build.SSHPublicKey} | head -c 14", "echo ${path.root} is not ${path.cwd}", "echo ${packer.version}", "echo ${uuidv4()}"]
max_retries = "5"
}
provisioner "shell" {
inline = ["echo ${local.password}", "echo ${data.amazon-secretsmanager.autogenerated_1.value}", "echo ${local.password_key}", "echo ${data.amazon-secretsmanager.autogenerated_2.value}"]
}
provisioner "shell" {
inline = ["echo ${data.amazon-secretsmanager.autogenerated_3.value}", "echo ${data.amazon-secretsmanager.autogenerated_4.value}"]
}
# template: hcl2_upgrade:2:38: executing "hcl2_upgrade" at <clean_resource_name>: error calling clean_resource_name: unhandled "clean_resource_name" call:
# there is no way to automatically upgrade the "clean_resource_name" call.
# Please manually upgrade to use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
@ -189,6 +192,7 @@ build {
inline = ["echo mybuild-{{isotime | clean_resource_name}}"]
}
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <lower>: error calling lower: unhandled "lower" call:
# there is no way to automatically upgrade the "lower" call.
# Please manually upgrade to `lower(var.example)`
@ -197,6 +201,7 @@ build {
inline = ["echo {{ `SOMETHING` | lower }}"]
}
# template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <upper>: error calling upper: unhandled "upper" call:
# there is no way to automatically upgrade the "upper" call.
# Please manually upgrade to `upper(var.example)`
@ -205,6 +210,7 @@ build {
inline = ["echo {{ `something` | upper }}"]
}
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <split `some-string` `-` 0>: error calling split: unhandled "split" call:
# there is no way to automatically upgrade the "split" call.
# Please manually upgrade to `split(separator, string)`
@ -213,6 +219,7 @@ build {
inline = ["echo {{ split `some-string` `-` 0 }}"]
}
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace_all `-` `/` build_name>: error calling replace_all: unhandled "replace_all" call:
# there is no way to automatically upgrade the "replace_all" call.
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
@ -221,6 +228,7 @@ build {
inline = ["echo {{ replace_all `-` `/` build_name }}"]
}
# template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace `some-string` `-` `/` 1>: error calling replace: unhandled "replace" call:
# there is no way to automatically upgrade the "replace" call.
# Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
@ -228,11 +236,13 @@ build {
provisioner "shell" {
inline = ["echo {{ replace `some-string` `-` `/` 1 }}"]
}
provisioner "shell-local" {
inline = ["sleep 100000"]
only = ["amazon-ebs"]
timeout = "5s"
}
post-processor "amazon-import" {
format = "vmdk"
license_type = "BYOL"

View File

@ -28,7 +28,9 @@ build {
provisioner "shell-local" {
inline = ["exit 2"]
}
error-cleanup-provisioner "shell-local" {
inline = ["echo 'rubber ducky'> ducky.txt"]
}
}

View File

@ -0,0 +1,229 @@
packer {
required_version = ">= 1.6.0"
}
variable "aws_access_key" {
type = string
default = ""
sensitive = true
}
variable "aws_region" {
type = string
}
variable "aws_secondary_region" {
type = string
default = "${env("AWS_DEFAULT_REGION")}"
}
variable "aws_secret_key" {
type = string
default = ""
sensitive = true
}
variable "secret_account" {
type = string
default = "🤷"
sensitive = true
}
data "amazon-secretsmanager" "autogenerated_1" {
name = "sample/app/password"
}
data "amazon-secretsmanager" "autogenerated_2" {
key = "api_key"
name = "sample/app/passwords"
}
data "amazon-secretsmanager" "autogenerated_3" {
name = "some_secret"
}
data "amazon-secretsmanager" "autogenerated_4" {
key = "with_key"
name = "some_secret"
}
data "amazon-ami" "autogenerated_1" {
filters = {
name = "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*"
root-device-type = "ebs"
virtualization-type = "hvm"
}
most_recent = true
owners = ["099720109477"]
}
locals { timestamp = regex_replace(timestamp(), "[- TZ:]", "") }
local "password" {
sensitive = true
expression = "${data.amazon-secretsmanager.autogenerated_1.value}"
}
locals {
password_key = "MY_KEY_${data.amazon-secretsmanager.autogenerated_2.value}"
}
source "amazon-ebs" "autogenerated_1" {
access_key = "${var.aws_access_key}"
ami_description = "Ubuntu 16.04 LTS - expand root partition"
ami_name = "ubuntu-16-04-test-${local.timestamp}"
encrypt_boot = true
launch_block_device_mappings {
delete_on_termination = true
device_name = "/dev/sda1"
volume_size = 48
volume_type = "gp2"
}
region = "${var.aws_region}"
secret_key = "${var.aws_secret_key}"
source_ami = "${data.amazon-ami.autogenerated_1.id}"
spot_instance_types = ["t2.small", "t2.medium", "t2.large"]
spot_price = "0.0075"
ssh_interface = "session_manager"
ssh_username = "ubuntu"
temporary_iam_instance_profile_policy_document {
Statement {
Action = ["*"]
Effect = "Allow"
Resource = ["*"]
}
Version = "2012-10-17"
}
}
source "amazon-ebs" "named_builder" {
access_key = "${var.aws_access_key}"
ami_description = "Ubuntu 16.04 LTS - expand root partition"
ami_name = "ubuntu-16-04-test-${local.timestamp}"
encrypt_boot = true
launch_block_device_mappings {
delete_on_termination = true
device_name = "/dev/sda1"
volume_size = 48
volume_type = "gp2"
}
region = "${var.aws_region}"
secret_key = "${var.aws_secret_key}"
source_ami = "${data.amazon-ami.autogenerated_1.id}"
spot_instance_types = ["t2.small", "t2.medium", "t2.large"]
spot_price = "0.0075"
ssh_interface = "session_manager"
ssh_username = "ubuntu"
temporary_iam_instance_profile_policy_document {
Statement {
Action = ["*"]
Effect = "Allow"
Resource = ["*"]
}
Version = "2012-10-17"
}
}
# a build block invokes sources and runs provisioning steps on them. The
# documentation for build blocks can be found here:
# https://www.packer.io/docs/templates/hcl_templates/blocks/build
build {
  sources = ["source.amazon-ebs.autogenerated_1", "source.amazon-ebs.named_builder"]
  # NOTE(review): only/except below carry over the JSON builder type name
  # ("amazon-ebs") unchanged; confirm these still match once sources are
  # addressed as "amazon-ebs.<name>" in HCL2.
  provisioner "shell" {
    except = ["amazon-ebs"]
    inline = ["echo ${var.secret_account}", "echo ${build.ID}", "echo ${build.SSHPublicKey} | head -c 14", "echo ${path.root} is not ${path.cwd}", "echo ${packer.version}", "echo ${uuidv4()}"]
    max_retries = "5"
  }
  provisioner "shell" {
    inline = ["echo ${local.password}", "echo ${data.amazon-secretsmanager.autogenerated_1.value}", "echo ${local.password_key}", "echo ${data.amazon-secretsmanager.autogenerated_2.value}"]
  }
  provisioner "shell" {
    inline = ["echo ${data.amazon-secretsmanager.autogenerated_3.value}", "echo ${data.amazon-secretsmanager.autogenerated_4.value}"]
  }
  # template: hcl2_upgrade:2:38: executing "hcl2_upgrade" at <clean_resource_name>: error calling clean_resource_name: unhandled "clean_resource_name" call:
  # there is no way to automatically upgrade the "clean_resource_name" call.
  # Please manually upgrade to use custom validation rules, `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
  # Visit https://packer.io/docs/templates/hcl_templates/variables#custom-validation-rules , https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
  provisioner "shell" {
    inline = ["echo mybuild-{{isotime | clean_resource_name}}"]
  }
  # template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <lower>: error calling lower: unhandled "lower" call:
  # there is no way to automatically upgrade the "lower" call.
  # Please manually upgrade to `lower(var.example)`
  # Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/lower for more infos.
  provisioner "shell" {
    inline = ["echo {{ `SOMETHING` | lower }}"]
  }
  # template: hcl2_upgrade:2:35: executing "hcl2_upgrade" at <upper>: error calling upper: unhandled "upper" call:
  # there is no way to automatically upgrade the "upper" call.
  # Please manually upgrade to `upper(var.example)`
  # Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/upper for more infos.
  provisioner "shell" {
    inline = ["echo {{ `something` | upper }}"]
  }
  # template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <split `some-string` `-` 0>: error calling split: unhandled "split" call:
  # there is no way to automatically upgrade the "split" call.
  # Please manually upgrade to `split(separator, string)`
  # Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/split for more infos.
  provisioner "shell" {
    inline = ["echo {{ split `some-string` `-` 0 }}"]
  }
  # template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace_all `-` `/` build_name>: error calling replace_all: unhandled "replace_all" call:
  # there is no way to automatically upgrade the "replace_all" call.
  # Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
  # Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
  provisioner "shell" {
    inline = ["echo {{ replace_all `-` `/` build_name }}"]
  }
  # template: hcl2_upgrade:2:21: executing "hcl2_upgrade" at <replace `some-string` `-` `/` 1>: error calling replace: unhandled "replace" call:
  # there is no way to automatically upgrade the "replace" call.
  # Please manually upgrade to `replace(string, substring, replacement)` or `regex_replace(string, substring, replacement)`
  # Visit https://www.packer.io/docs/templates/hcl_templates/functions/string/replace or https://www.packer.io/docs/templates/hcl_templates/functions/string/regex_replace for more infos.
  provisioner "shell" {
    inline = ["echo {{ replace `some-string` `-` `/` 1 }}"]
  }
  provisioner "shell-local" {
    inline = ["sleep 100000"]
    only = ["amazon-ebs"]
    timeout = "5s"
  }
  post-processor "amazon-import" {
    format = "vmdk"
    license_type = "BYOL"
    region = "eu-west-3"
    s3_bucket_name = "hashicorp.adrien"
    tags = {
      Description = "packer amazon-import ${local.timestamp}"
    }
  }
  # post-processors {} preserves the JSON template's second, sequential
  # post-processor chain.
  post-processors {
    post-processor "artifice" {
      keep_input_artifact = true
      files = ["path/something.ova"]
      name = "very_special_artifice_post-processor"
      only = ["amazon-ebs"]
    }
    post-processor "amazon-import" {
      except = ["amazon-ebs"]
      license_type = "BYOL"
      s3_bucket_name = "hashicorp.adrien"
      tags = {
        Description = "packer amazon-import ${local.timestamp}"
      }
    }
  }
}

View File

@ -0,0 +1,235 @@
{
"min_packer_version": "1.6.0",
"variables": {
"secret_account": "🤷",
"aws_region": null,
"aws_secondary_region": "{{ env `AWS_DEFAULT_REGION` }}",
"aws_secret_key": "",
"aws_access_key": "",
"password": "{{ aws_secretsmanager `sample/app/password` }}",
"password_key": "MY_KEY_{{ aws_secretsmanager `sample/app/passwords` `api_key` }}"
},
"sensitive-variables": [
"aws_secret_key",
"aws_access_key",
"secret_account",
"potato",
"password"
],
"builders": [
{
"type": "amazon-ebs",
"region": "{{ user `aws_region` }}",
"secret_key": "{{ user `aws_secret_key` }}",
"access_key": "{{ user `aws_access_key` }}",
"ami_name": "ubuntu-16-04-test-{{ timestamp }}",
"ami_description": "Ubuntu 16.04 LTS - expand root partition",
"source_ami_filter": {
"filters": {
"virtualization-type": "hvm",
"name": "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*",
"root-device-type": "ebs"
},
"owners": [
"099720109477"
],
"most_recent": true
},
"launch_block_device_mappings": [
{
"delete_on_termination": true,
"device_name": "/dev/sda1",
"volume_type": "gp2",
"volume_size": 48
}
],
"spot_price": "0.0075",
"spot_instance_types": [
"t2.small",
"t2.medium",
"t2.large"
],
"encrypt_boot": true,
"ssh_username": "ubuntu",
"temporary_iam_instance_profile_policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"*"
],
"Resource": ["*"]
}
]
},
"ssh_interface": "session_manager"
},
{
"type": "amazon-ebs",
"name": "named_builder",
"region": "{{ user `aws_region` }}",
"secret_key": "{{ user `aws_secret_key` }}",
"access_key": "{{ user `aws_access_key` }}",
"ami_name": "ubuntu-16-04-test-{{ timestamp }}",
"ami_description": "Ubuntu 16.04 LTS - expand root partition",
"source_ami_filter": {
"filters": {
"virtualization-type": "hvm",
"name": "ubuntu/images/*/ubuntu-xenial-16.04-amd64-server-*",
"root-device-type": "ebs"
},
"owners": [
"099720109477"
],
"most_recent": true
},
"launch_block_device_mappings": [
{
"delete_on_termination": true,
"device_name": "/dev/sda1",
"volume_type": "gp2",
"volume_size": 48
}
],
"spot_price": "0.0075",
"spot_instance_types": [
"t2.small",
"t2.medium",
"t2.large"
],
"encrypt_boot": true,
"ssh_username": "ubuntu",
"temporary_iam_instance_profile_policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"*"
],
"Resource": ["*"]
}
]
},
"ssh_interface": "session_manager"
}
],
"provisioners": [
{
"type": "shell",
"except": [
"amazon-ebs"
],
"max_retries": 5,
"inline": [
"echo {{ user `secret_account` }}",
"echo {{ build `ID` }}",
"echo {{ build `SSHPublicKey` }} | head -c 14",
"echo {{ template_dir }} is not {{ pwd }}",
"echo {{ packer_version }}",
"echo {{ uuid }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ user `password` }}",
"echo {{ aws_secretsmanager `sample/app/password` }}",
"echo {{ user `password_key` }}",
"echo {{ aws_secretsmanager `sample/app/passwords` `api_key` }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ aws_secretsmanager `some_secret` }}",
"echo {{ aws_secretsmanager `some_secret` `with_key` }}"
]
},
{
"type": "shell",
"inline": [
"echo mybuild-{{isotime | clean_resource_name}}"
]
},
{
"type": "shell",
"inline": [
"echo {{ `SOMETHING` | lower }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ `something` | upper }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ split `some-string` `-` 0 }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ replace_all `-` `/` build_name }}"
]
},
{
"type": "shell",
"inline": [
"echo {{ replace `some-string` `-` `/` 1 }}"
]
},
{
"type": "shell-local",
"only": [
"amazon-ebs"
],
"timeout": "5s",
"inline": [
"sleep 100000"
]
}
],
"post-processors": [
[
{
"type": "amazon-import",
"region": "eu-west-3",
"s3_bucket_name": "hashicorp.adrien",
"license_type": "BYOL",
"format": "vmdk",
"tags": {
"Description": "packer amazon-import {{timestamp}}"
}
}
],
[
{
"only": [
"amazon-ebs"
],
"files": [
"path/something.ova"
],
"keep_input_artifact": true,
"name": "very_special_artifice_post-processor",
"type": "artifice"
},
{
"except": [
"amazon-ebs"
],
"type": "amazon-import",
"s3_bucket_name": "hashicorp.adrien",
"license_type": "BYOL",
"tags": {
"Description": "packer amazon-import {{timestamp}}"
}
}
]
]
}